#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <ctype.h>
#include <sys/stat.h>
#include <sys/select.h>


#include "url.h"
#include "waa.h"
#include "cache.h"
#include "helper.h"
#include "est_ops.h"
#include "checksum.h"
#include "racallback.h"


char **url__parm_list=NULL;
int url__parm_list_len=0,
	url__parm_list_used=0;


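/** Searches \c urllist for a URL with the given \a name.
 * An empty \a name is treated like \c NULL and matches the unnamed URL.
 * On success the entry is returned via \a storage (if non-\c NULL);
 * otherwise \c EADDRNOTAVAIL is returned. */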
int url__find_by_name(const char *name, struct url_t **storage)
{
	int status;
	int i;


	if (name && !*name) name=NULL;

	status=EADDRNOTAVAIL;
	for(i=0; i<urllist_count; i++)
	{
		if (!urllist[i]->name ?
				(!name || !*name) :
				(strcmp(urllist[i]->name, name) == 0) )
		{
			if (storage) *storage=urllist[i];
			status=0;
			break;
		}
	}

	if (status)
		DEBUGP("url with name %s not found!", name);

	return status;
}


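/** Searches the given \a list (of \a count entries) for an exact URL
 * string match.
 * Returns \c EADDRNOTAVAIL if not found; the match is given back via
 * \a storage (if non-\c NULL). */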
int url__find_by_url_in_list(char *url,
		struct url_t **list, int count,
		struct url_t **storage)
{
	int status;
	int i;


	status=EADDRNOTAVAIL;
	for(i=0; i<count; i++)
	{
		if (strcmp(list[i]->url, url) == 0)
		{
			if (storage) *storage=list[i];
			status=0;
			break;
		}
	}

	if (status)
		DEBUGP("url with url %s not found!", url);

	return status;
}


int url__find_by_url(char *url, struct url_t **storage)
{
	return url__find_by_url_in_list(url, urllist, urllist_count, storage);
}


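/** Searches \c urllist for the URL with the given internal number
 * \a intnum.
 * Returns \c EADDRNOTAVAIL if no URL uses that number. */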
int url__find_by_intnum(int intnum, struct url_t **storage)
{
	int status;
	int i;


	status=EADDRNOTAVAIL;
	for(i=0; i<urllist_count; i++)
	{
		if (urllist[i]->internal_number == intnum)
		{
			if (storage) *storage=urllist[i];
			status=0;
			break;
		}
	}

	if (status)
		DEBUGP("url with intnum %d not found!", intnum);
	else
		DEBUGP("url with intnum %d is %s", intnum, (*storage)->url);

	return status;
}


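/** Flags remembering which parts of a URL specification have already
 * been seen while parsing. */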
#define HAVE_NAME (1)
#define HAVE_PRIO (2)
#define HAVE_URL (4)
#define HAVE_TARGET (8)
#define HAVE_READONLY (16)

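/** Parses a URL specification like
 *   name:N,prio:P,target:R,ro:1,http://host/path
 * into \a storage.
 * The comma-separated settings may appear in any order, but must come
 * before the URL itself; trailing slashes of the URL are removed.
 * Which parts were given is reported via \a def_parms (if non-\c NULL)
 * as a combination of the \c HAVE_* flags above. */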
int url__parse(char *input, struct url_t *storage, int *def_parms)
{
	int status;
	char *cp, *value, *end, *cur;
	struct url_t eurl;
	int nlen, vlen, have_seen;


	status=0;

	have_seen=0;
	memset(&eurl, 0, sizeof(eurl));

	eurl.internal_number=INVALID_INTERNAL_NUMBER;
	eurl.current_rev=0;
	eurl.target_rev=SVN_INVALID_REVNUM;
	eurl.current_target_override=0;
	eurl.head_rev=SVN_INVALID_REVNUM;
	cur=input;

	DEBUGP("input: %s", input);
	while (! (have_seen & HAVE_URL))
	{
		end=cur;
		value=NULL;
		while (*end)
		{
			if (*end == ':' && !value)
				value = end+1;

			if (*end == ',') break;

			end++;
		}

		nlen = (value ? value-1 : end) - cur;
		vlen = value ? end - value : 0;

		DEBUGP("cur=%s value=%s end=%s vlen=%d nlen=%d",
				cur, value, end, vlen, nlen);

		if (strncmp("name", cur, nlen) == 0 ||
				strncmp("N", cur, nlen) == 0)
		{
			STOPIF_CODE_ERR( have_seen & HAVE_NAME, EINVAL,
					"!Found two names in URL '%s'; only one may be given.",
					input);
			if (!value) goto need_value;

			if (vlen==0)
				DEBUGP("NULL name");
			else if (storage)
			{
				STOPIF( hlp__strnalloc(vlen, &eurl.name, value), NULL);

				DEBUGP("got a name '%s' (%d bytes), going on with '%s'",
						eurl.name, vlen, end);
				have_seen |= HAVE_NAME;
			}
		}
		else if (strncmp("target", cur, nlen) == 0 ||
				strncmp("T", cur, nlen) == 0)
		{
			STOPIF_CODE_ERR( have_seen & HAVE_TARGET, EINVAL,
					"!Already got a target revision in URL '%s'.",
					input);
			if (!value) goto need_value;
			STOPIF( hlp__parse_rev( value, &cp, & eurl.target_rev), NULL);
			STOPIF_CODE_ERR( cp == value || cp != end, EINVAL,
					"The given target revision in '%s' is invalid.",
					input);
			DEBUGP("got target %s", hlp__rev_to_string(eurl.target_rev));
			have_seen |= HAVE_TARGET;
		}
		else if (strncmp("prio", cur, nlen) == 0 ||
				strncmp("P", cur, nlen) == 0)
		{
			STOPIF_CODE_ERR( have_seen & HAVE_PRIO, EINVAL,
					"!Found two priorities in URL '%s'; only one allowed.",
					input);
			if (!value) goto need_value;
			eurl.priority=strtol(value, &cp, 0);
			STOPIF_CODE_ERR( cp == value || cp != end, EINVAL,
					"!The given url \"%s\" is invalid; cannot parse the priority.",
					input);
			DEBUGP("got priority %d", eurl.priority);
			have_seen |= HAVE_PRIO;
		}
		else if (strncmp("readonly", cur, nlen) == 0 ||
				strncmp("ro", cur, nlen) == 0)
		{
			STOPIF_CODE_ERR( have_seen & HAVE_READONLY, EINVAL,
					"!Found two readonly flags in URL \"%s\"; only one allowed.",
					input);
			if (value)
			{
				eurl.is_readonly=strtol(value, &cp, 0);
				STOPIF_CODE_ERR( cp == value || cp != end, EINVAL,
						"!Cannot parse the readonly flag in \"%s\".", input);
			}
			else
				eurl.is_readonly=1;

			have_seen |= HAVE_READONLY;
		}
		else
		{
			nlen++;
			if ((nlen == 4 && strncmp("svn:", cur, nlen) == 0) ||
					(nlen == 5 &&
					 (strncmp("http:", cur, nlen) == 0 ||
						strncmp("file:", cur, nlen) == 0) ) ||
					(nlen == 6 && strncmp("https:", cur, nlen) == 0) ||
					(nlen == 8 && strncmp("svn+ssh:", cur, nlen) == 0) )
				DEBUGP("known protocol found");
			else
				STOPIF_CODE_ERR(1, EINVAL,
						"!The protocol given in \"%s\" is unknown!", cur);

			if (!value || vlen<3 || strncmp(value, "//", 2)!=0)
				STOPIF_CODE_ERR(1, EINVAL, "!The URL in \"%s\" is invalid.", cur);

			while (vlen>3 && value[vlen-1] == '/')
				value[--vlen] = 0;

			eurl.urllen=nlen + 0 + 1 + vlen - 1;
			STOPIF( hlp__strdup( &eurl.url, cur), NULL);

			have_seen |= HAVE_URL;
		}

		while (*end == ',') end++;
		if (!*end) break;
		cur=end;
	}


	if (def_parms)
		*def_parms=have_seen;
	else
		STOPIF_CODE_ERR( !(have_seen & HAVE_URL), EINVAL,
				"!No URL found in %s", input);

	if (storage) *storage=eurl;

ex:
	return status;

need_value:
	STOPIF(EINVAL,
			"!Specification '%s' is not a valid URL - ':' missing.", input);
	goto ex;
}


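/** Parses \a eurl and either inserts it as a new entry into \c urllist,
 * or updates the existing entry found by URL (or, if no URL was given,
 * by name).
 * \a existed (if non-\c NULL) is set to \c EEXIST when an entry was
 * updated, else to 0; \a storage receives the affected entry.
 * The caller must have reserved space for a possible new entry via
 * url__allocate(). */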
int url__insert_or_replace(char *eurl,
		struct url_t **storage,
		int *existed)
{
	int status;
	int seen;
	struct url_t target, *dupl, *dest, *by_name;


	status=0;
	STOPIF( url__parse(eurl, &target, &seen), NULL);

	by_name=NULL;
	if (seen & HAVE_NAME)
		url__find_by_name(target.name, &by_name);

	dupl=NULL;
	if (seen & HAVE_URL)
		url__find_by_url(target.url, &dupl);
	else
	{
		dupl=by_name;
	}

	if (!dupl)
	{
		if (!(seen & HAVE_URL))
		{
			STOPIF( EINVAL,
					!(seen & HAVE_NAME) ?
					"!No URL was given in \"%s\"." :
					"!Cannot find the name given in \"%s\", so cannot modify a URL.",
					eurl);
		}
		if (seen & HAVE_NAME)
		{
			STOPIF_CODE_ERR( by_name, EADDRINUSE,
					"!There's already a URL named \"%s\"", target.name);

			status=0;
		}

		dest=urllist[urllist_count];
		*dest = target;
		urllist_count++;
	}
	else
	{
		if (seen & HAVE_TARGET)
			dupl->target_rev = target.target_rev;
		if (seen & HAVE_PRIO)
			dupl->priority = target.priority;
		if (seen & HAVE_READONLY)
			dupl->is_readonly = target.is_readonly;
		if (seen & HAVE_NAME)
			dupl->name = target.name;

		dest=dupl;
	}

	if (existed)
		*existed = dupl ? EEXIST : 0;

	if (storage)
		*storage=dest;

ex:
	return status;
}


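/** Returns the index of the first bit at or after \a from that is not
 * set in \a fd.  An \c fd_set is (ab)used as a simple bitmap here. */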
int find_next_zero_bit(fd_set *fd, int from)
{
	while (FD_ISSET(from, fd)) from++;
	return from;
}


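/** Verifies the internal numbers of all URLs in \c urllist and assigns
 * free numbers to entries that still have \c INVALID_INTERNAL_NUMBER.
 * Duplicate internal numbers are reported as an error. */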
int url___set_internal_nums(void)
{
	int status;
	int i, j, bit;
	fd_set bitmap;


	STOPIF_CODE_ERR( sizeof(bitmap)*8 < urllist_count, EMFILE,
			"Your fd_set is too small for the number of urls.\n"
			"Please contact dev@fsvs.tigris.org for help.");

	status=0;
	FD_ZERO(&bitmap);

	for(i=0; i<urllist_count; i++)
	{
		if (urllist[i]->internal_number > urllist_count)
		{
			for(j=i+1; j<urllist_count; j++)
				STOPIF_CODE_ERR(
						urllist[i]->internal_number == urllist[j]->internal_number,
						EINVAL, "The URLs %s and %s have identical internal numbers!",
						urllist[i]->url, urllist[j]->url);
		}
		else if (urllist[i]->internal_number != INVALID_INTERNAL_NUMBER)
		{
			STOPIF_CODE_ERR( FD_ISSET(urllist[i]->internal_number, &bitmap),
					EINVAL,
					"The URL %s has a duplicate internal number!",
					urllist[i]->url);

			FD_SET(urllist[i]->internal_number, &bitmap);
		}
	}

	bit=1;
	for(i=0; i<urllist_count; i++)
	{
		DEBUGP("inum for %s is %d",
				urllist[i]->url, urllist[i]->internal_number);
		if (urllist[i]->internal_number == INVALID_INTERNAL_NUMBER)
		{
			bit= find_next_zero_bit(&bitmap, bit);
			DEBUGP("found a free bit for %s: %d",
					urllist[i]->url, bit);

			urllist[i]->internal_number=bit;

			bit++;
		}
	}

ex:
	return status;
}


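/** Grows \c urllist by \a reserve_space entries.
 * The pointer array gets reallocated, the new \c url_t structures are
 * allocated zeroed in one block, and the list is kept NULL-terminated. */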
int url__allocate(int reserve_space)
{
	int status;
	struct url_t *url_mem;
	int i;


	status=0;

	STOPIF( hlp__realloc( &urllist,
				sizeof(*urllist) * (urllist_count+1+reserve_space)), NULL);
	STOPIF( hlp__calloc( &url_mem, sizeof(*url_mem), reserve_space), NULL);

	for(i=0; i<reserve_space; i++)
	{
		urllist[urllist_count+i]=url_mem+i;
	}
	urllist[urllist_count+i]=NULL;

ex:
	return status;
}


int url__indir_sorter(const void *a, const void *b)
{
	struct url_t *u1=*(struct url_t **)a,
		*u2=*(struct url_t **)b;

	return url__sorter(u1, u2);
}


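/** Loads the URL list of working copy \a dir from the WAA and merges the
 * entries into \c urllist, reserving \a reserve_space additional slots.
 * Each stored line holds the internal number, the current revision and
 * the URL specification; afterwards the list is sorted via url__sorter().
 * Returns \c ENOENT if no list exists yet. */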
int url__load_list(char *dir, int reserve_space)
{
	int status, fh, l, i;
	struct stat64 st;
	char *urllist_mem;
	int inum, cnt, new_count;
	svn_revnum_t rev;
	struct url_t *target;


	fh=-1;
	urllist_mem=NULL;

	status=waa__open_byext(dir, WAA__URLLIST_EXT, WAA__READ, &fh);
	if (status==ENOENT)
	{
		STOPIF( url__allocate(reserve_space), NULL);
		status=ENOENT;
		goto ex;
	}

	STOPIF_CODE_ERR(status, status, "Cannot read URL list");

	STOPIF_CODE_ERR( fstat64(fh, &st) == -1, errno,
			"fstat() of url-list");

	STOPIF( hlp__alloc( &urllist_mem, st.st_size+1), NULL);

	status=read(fh, urllist_mem, st.st_size);
	STOPIF_CODE_ERR( status != st.st_size, errno,
			"error reading url-list");

	urllist_mem[st.st_size]=0;

	new_count=0;
	for(l=0; l<st.st_size; )
	{
		while (isspace(urllist_mem[l])) l++;

		if (urllist_mem[l]) new_count++;
		l += strlen(urllist_mem+l)+1;
	}

	DEBUGP("found %d urls", new_count);
	STOPIF( url__allocate(reserve_space+new_count), NULL);

	for(l=i=0; i<new_count; )
	{
		while (isspace(urllist_mem[l])) l++;

		DEBUGP("url %d of %d: %s",i, new_count, urllist_mem+l);
		if (urllist_mem[l])
		{
			STOPIF_CODE_ERR(
					sscanf(urllist_mem+l, "%d %ld %n",
						&inum, &rev, &cnt) != 2,
					EINVAL,
					"Cannot parse urllist line '%s'", urllist_mem+l);

			STOPIF( url__insert_or_replace(urllist_mem+l+cnt, &target, NULL), NULL);
			target->internal_number=inum;
			target->current_rev=rev;

			i++;
			l += strlen(urllist_mem+l);
		}

		l++;
	}

	qsort(urllist, urllist_count, sizeof(*urllist), url__indir_sorter);

ex:
	if (fh!=-1)
	{
		l=close(fh);
		STOPIF_CODE_ERR(l == -1 && !status, errno, "closing the url-list");
	}

	return status;
}


int url__load_nonempty_list(char *dir, int reserve_space)
{
	int status, load_st;

	status=0;
	if (!dir) dir=wc_path;

	load_st=url__load_list(dir, reserve_space);
	STOPIF_CODE_ERR( load_st==ENOENT ||
			urllist_count==0, ENOENT,
			"!No URLs have been defined for %s.", dir);

ex:
	return status;
}


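/** Writes the current \c urllist back into the WAA.
 * Internal numbers are assigned first; every entry is stored as a
 * NUL-terminated line of the form
 *   "inum current_rev T:target,N:name,P:prio,ro:flag,URL",
 * followed by a newline for readability. */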
int url__output_list(void)
{
	int status, i, fh, l;
	char buffer[1024];
	struct url_t *url;


	fh=-1;

	STOPIF( url___set_internal_nums(),
			"Setting the internal numbers failed.");

	STOPIF( waa__open_byext(NULL, WAA__URLLIST_EXT, WAA__WRITE, &fh), NULL);
	for(i=0; i<urllist_count; i++)
	{
		url=urllist[i];

		l=snprintf(buffer, sizeof(buffer),
				"%d %ld T:%ld,N:%s,P:%d,ro:%u,%s",
				url->internal_number,
				url->current_rev,
				url->target_rev,
				url->name ? url->name : "",
				url->priority,
				url->is_readonly,
				url->url);

		STOPIF_CODE_ERR( l > sizeof(buffer)-4, E2BIG,
				"You've got too long URLs; I'd need %d bytes. Sorry.", l);

		l++;
		STOPIF_CODE_ERR( write(fh, buffer, l) != l, errno,
				"Error writing the URL list");
		STOPIF_CODE_ERR( write(fh, "\n", 1) != 1, errno,
				"Error writing the URL list delimiter");
		DEBUGP("writing line %s", buffer);
	}

ex:
	if (fh != -1)
	{
		i=waa__close(fh, status);
		fh=-1;
		STOPIF(i, "Error closing the URL list");
	}

	return status;
}


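/** Opens (or reuses) the RA session of \c current_url.
 * If \a missing_dirs is non-\c NULL, the session is reparented upwards
 * until an existing directory is found, and the still-missing path
 * suffix is returned there (or \c NULL if the URL exists completely). */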
int url__open_session(svn_ra_session_t **session, char **missing_dirs)
{
	int status;
	svn_error_t *status_svn;
	apr_hash_t *cfg;
	char *buffer, *cp;
	int exists;
	svn_revnum_t head;


	status=0;
	if (!current_url->pool)
	{
		STOPIF( apr_pool_create_ex(& current_url->pool, global_pool,
					NULL, NULL),
				"no pool");
	}

	STOPIF( hlp__get_svn_config(&cfg), NULL);

	if (current_url->session) goto ex;

	STOPIF( hlp__strnalloc(current_url->urllen,
				&buffer, current_url->url), NULL);
	cp=buffer+current_url->urllen;
	BUG_ON(*cp);

	STOPIF_SVNERR_TEXT( svn_ra_open,
			(& current_url->session, buffer,
			 &cb__cb_table, NULL,
			 cfg,
			 current_url->pool),
			"svn_ra_open(\"%s\")", current_url->url);
	head=SVN_INVALID_REVNUM;
	STOPIF( url__canonical_rev( current_url, &head), NULL);

	DEBUGP("Trying url %s@%ld", buffer, head);
	while (1)
	{
		if (!missing_dirs) break;

		STOPIF( cb__does_path_exist(current_url->session, "", head,
					&exists, current_url->pool), NULL);
		if (exists) break;

		while (cp > buffer+4 && *cp != '/') cp--;

		STOPIF_CODE_EPIPE(cp[-1] == '/', EINVAL,
				"!Unsuccessful svn_ra_stat() on every try for URL \"%s\".",
				current_url->url);

		*cp=0;

		DEBUGP("Reparent to %s", buffer);
		STOPIF_SVNERR( svn_ra_reparent,
				(current_url->session, buffer, current_url->pool));
	}

	if (missing_dirs)
	{
		if (buffer + current_url->urllen == cp)
		{
			*missing_dirs=NULL;
			IF_FREE(buffer);
		}
		else
		{
			strcpy(buffer, current_url->url + 1 + (cp - buffer));

			DEBUGP("returning missing=%s", buffer);
			*missing_dirs=buffer;
		}
	}
	else IF_FREE(buffer);

	if (session)
		*session = current_url->session;

ex:
	return status;
}


int url__close_session(struct url_t *cur)
{
	if (cur->pool)
	{
		DEBUGP("closing session and pool for %s", cur->url);

		BUG_ON(cur->pool == NULL && cur->session != NULL);
		apr_pool_destroy(cur->pool);
		cur->session=NULL;
		cur->pool=NULL;
	}

	return 0;
}


int url__close_sessions(void)
{
	int status;
	int i;

	status=0;

	IF_FREE(url__parm_list);
	url__parm_list_len=url__parm_list_used=0;

	for(i=0; i<urllist_count; i++)
		STOPIF( url__close_session( urllist[i] ), NULL);

ex:
	return status;
}


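/** Returns whether \c current_url takes precedence over \a to_compare,
 * ie. whether its priority value is lower or equal (a lower number
 * means higher priority).  A \c NULL \a to_compare always yields true. */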
int url__current_has_precedence(struct url_t *to_compare)
{
	return to_compare==NULL ||
		(current_url->priority <= to_compare->priority);
}


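/** Prints all URLs to \c stdout according to \a format.
 * Escape sequences (\\n, \\r, \\t, \\f, \\xNN) and the placeholders
 * %n (name), %t (target revision), %r (current revision), %R (readonly
 * flag), %I (internal number), %p (priority), %u (URL) and %% are
 * interpreted.  If \a format is \c NULL a default format is used,
 * depending on the verbosity level. */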
int url___dump(char *format)
{
	int status;
	int i;
	char *cp;
	FILE *output=stdout;
	struct url_t *url;


	if (!format)
		format= opt__is_verbose()>0 ?
			"%u\\n\tname: \"%n\"; priority: %p; current revision: %r; "
			"target: %t; readonly:%R\\n" :
			"name:%n,prio:%p,target:%t,ro:%r,%u\\n";

	status=0;
	for(i=0; i < urllist_count; i++)
	{
		url = urllist[i];
		cp=format;

		while (*cp)
		{
			switch (cp[0])
			{
				case '\\':
					switch (cp[1])
					{
						case '\\':
							STOPIF_CODE_EPIPE( fputc('\\', output), NULL);
							break;
						case 'n':
							STOPIF_CODE_EPIPE( fputc('\n', output), NULL);
							break;
						case 'r':
							STOPIF_CODE_EPIPE( fputc('\r', output), NULL);
							break;
						case 't':
							STOPIF_CODE_EPIPE( fputc('\t', output), NULL);
							break;
						case 'f':
							STOPIF_CODE_EPIPE( fputc('\f', output), NULL);
							break;
						case 'x':
							status= cp[2] && cp[3] ? cs__two_ch2bin(cp+2) : -1;
							STOPIF_CODE_ERR(status <0, EINVAL,
									"A \"\\x\" sequence must have 2 hex digits.");
							STOPIF_CODE_EPIPE( fputc(status, output), NULL);

							cp+=2;
							break;
						default:
							STOPIF_CODE_ERR(1, EINVAL,
									"Unknown escape sequence '\\%c' in format.",
									cp[1]);
							break;
					}
					cp+=2;
					break;

				case '%':
					switch (cp[1])
					{
						case 'n':
							STOPIF_CODE_EPIPE( fputs(url->name ?: "", output), NULL);
							break;
						case 't':
							STOPIF_CODE_EPIPE( fputs(
										hlp__rev_to_string(url->target_rev),
										output), NULL);
							break;
						case 'r':
							STOPIF_CODE_EPIPE( fputs(
										hlp__rev_to_string(url->current_rev),
										output), NULL);
							break;
						case 'R':
							STOPIF_CODE_EPIPE( fprintf(output, "%u",
										url->is_readonly), NULL);
							break;
						case 'I':
							STOPIF_CODE_EPIPE( fprintf(output, "%u",
										url->internal_number), NULL);
							break;
						case 'p':
							STOPIF_CODE_EPIPE( fprintf(output, "%u",
										url->priority), NULL);
							break;
						case 'u':
							STOPIF_CODE_EPIPE( fputs(url->url, output), NULL);
							break;
						case '%':
							STOPIF_CODE_EPIPE( fputc('%', output), NULL);
							break;
						default:
							STOPIF_CODE_ERR(1, EINVAL,
									"Invalid placeholder '%%%c' in format.",
									cp[1]);
							break;
					}
					cp+=2;
					break;

				default:
					STOPIF_CODE_EPIPE( fputc(*cp, output), NULL);
					cp++;
			}
		}
	}

	status=0;

ex:
	return status;
}


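/** Builds the full repository URL of \a sts below the given \a url into
 * a small string cache and returns it via \a output.
 * For a \c NULL \a url the literal string "(none)" is returned. */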
int url__other_full_url(struct estat *sts, struct url_t *url, char **output)
{
	static const char none[]="(none)";
	static struct cache_t *cache=NULL;
	int status, len;
	char *data, *path;

	status=0;

	if (url)
	{
		STOPIF( ops__build_path( &path, sts), NULL);
		len=url->urllen + 1 + sts->path_len+1;
		STOPIF( cch__new_cache(&cache, 4), NULL);

		STOPIF( cch__add(cache, 0, NULL, len, &data), NULL);
		strcpy( data, url->url);

		if (path[0]=='.' && path[1]==0)
		{
			/* The working copy root itself; the URL is used unchanged. */
		}
		else
		{
			if (path[0]=='.' && path[1]==PATH_SEPARATOR) path += 2;

			data[url->urllen]='/';
			strcpy( data+url->urllen+1, path);
		}

		*output=data;
	}
	else
		*output=(char*)none;

ex:
	return status;
}


int url__full_url(struct estat *sts, char **url)
{
	int status;

	STOPIF( url__other_full_url(sts, sts->url, url), NULL);

ex:
	return status;
}


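/** Returns the entry of \c urllist whose URL is a prefix of the given
 * \a url, or \c ENOENT if none matches. */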
int url__find(char *url, struct url_t **output)
{
	int i;
	struct url_t *cur;


	for(i=0; i<urllist_count; i++)
	{
		cur=urllist[i];
		if (strncmp(cur->url, url, cur->urllen) == 0)
		{
			*output = cur;
			return 0;
		}
	}

	return ENOENT;
}


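/** Implements the "urls" action.
 * With "load" as first parameter a new URL list is read from stdin,
 * keeping the old internal numbers where the URLs match; with "dump"
 * (or no parameters) the current list is printed.  Any other parameters
 * are parsed as URL specifications and inserted or updated.  Except for
 * "dump" the resulting list is written back to the WAA. */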
int url__work(struct estat *root UNUSED, int argc, char *argv[])
{
	int status, fh, l, i, had_it;
	char *dir;
	char *cp;
	int have_space;
	struct url_t *target;
	struct url_t *tmp;
	struct url_t **old_urllist;
	int old_urllist_count;


	dir=NULL;
	fh=-1;

	STOPIF( waa__given_or_current_wd(NULL, &dir), NULL );

	STOPIF( waa__set_working_copy(dir), NULL);

	if (argc>0 && strcmp(argv[0], parm_load) == 0)
	{
		status=url__load_list(NULL, argc+1);
		if (!status || status == ENOENT)
		{
			/* Fine: either the old list was loaded, or none exists yet. */
		}
		else
		{
			STOPIF_CODE_ERR_GOTO( 1, status, ignore_err,
					"!Got an error reading the old URL list, so the internal URL mappings\n"
					"cannot be kept; a \"sync-repos\" might be necessary.");
ignore_err:
			;
		}

		old_urllist_count=urllist_count;
		old_urllist=urllist;
		urllist=NULL;
		urllist_count=0;

		status=0;

		i=0;
		have_space=0;
		while (1)
		{
			if (have_space < 1)
			{
				have_space=32;
				STOPIF( url__allocate(have_space), NULL);
			}

			status=hlp__string_from_filep(stdin, &cp, NULL, SFF_WHITESPACE);
			if (status == EOF) break;

			DEBUGP("parsing %s into %d", cp, urllist_count);
			STOPIF( url__insert_or_replace(cp, &target, &had_it), NULL);
			DEBUGP("had=%d", had_it);
			if (!had_it)
			{
				have_space--;
				i++;
			}
			target->current_rev=0;

			if (url__find_by_url_in_list(target->url,
						old_urllist, old_urllist_count, &tmp) == 0)
				target->internal_number = tmp->internal_number;
		}

		IF_FREE(old_urllist);

		if (opt__is_verbose() >= 0)
			printf("%d URL%s loaded.\n", i, i==1 ? "" : "s");
	}
	else
	{
		status=url__load_list(NULL, argc+1);

		if (status == ENOENT)
			urllist_count=0;
		else
			STOPIF_CODE_ERR( status, status, NULL);

		if (argc == 0 || strcmp(argv[0], parm_dump) == 0)
		{
			STOPIF_CODE_ERR( status==ENOENT, ENOENT,
					"!No URLs defined for \"%s\".", dir);

			STOPIF( url___dump(argc ? argv[1] : NULL), NULL);
			goto ex;
		}

		DEBUGP("%d to parse", argc);

		for(l=0; l<argc; l++)
		{
			DEBUGP("parsing %s into %d", argv[l], urllist_count);
			STOPIF( url__insert_or_replace(argv[l], &target, &had_it), NULL);
			if (!had_it)
				target->current_rev=0;
		}
	}

	STOPIF( waa__create_working_copy(dir), NULL);

	STOPIF( url__output_list(), NULL);

ex:
	return status;
}


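/** Marks the URLs whose names were collected via url__store_url_name()
 * as to-be-handled.
 * Each stored parameter may contain several names, separated by ",;"
 * or whitespace; an optional "@revision" suffix overrides the target
 * revision for that URL. */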
int url__mark_todo(void)
{
	int status;
	char *parm, *url_string, *rev_str, **list;
	static const char delim[]=",; \t\r\n\f";
	struct url_t *url;


	status=0;
	if (!url__parm_list_used) goto ex;

	url__parm_list[url__parm_list_used] = NULL;
	list=url__parm_list;
	while (*list)
	{
		parm=*(list++);

		url_string=strtok(parm, delim);
		while (url_string && *url_string)
		{
			DEBUGP("marking URL %s", url_string);

			rev_str=strchr(url_string, '@');
			if (rev_str) *(rev_str++)=0;

			STOPIF( url__find_by_name(url_string, &url),
					"!No URL with name \"%s\" found", url_string);

			if (url->to_be_handled)
				DEBUGP("URL %s mentioned multiple times", url->url);
			url->to_be_handled=1;

			if (rev_str)
			{
				STOPIF( hlp__parse_rev(rev_str, NULL,
							& url->current_target_rev), NULL);
				url->current_target_override=1;
			}

			url_string=strtok(NULL, delim);
		}
	}

ex:
	return status;
}


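/** Remembers \a parm (a URL name, or a list of names) for later
 * processing by url__mark_todo(), growing \c url__parm_list as needed. */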
int url__store_url_name(char *parm)
{
	int status;

	status=0;

	if (url__parm_list_used+2 >= url__parm_list_len)
	{
		url__parm_list_len= url__parm_list_len ? url__parm_list_len*2 : 8;
		STOPIF( hlp__realloc( &url__parm_list,
					url__parm_list_len*sizeof(*url__parm_list)), NULL);
	}

	url__parm_list[url__parm_list_used++] = parm;

ex:
	return status;
}


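/** Resolves \c SVN_INVALID_REVNUM in \a rev to the repository HEAD of
 * \a url, querying (and caching) the HEAD revision via the already
 * opened RA session. */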
int url__canonical_rev( struct url_t *url, svn_revnum_t *rev)
{
	int status;
	svn_error_t *status_svn;


	status=0;
	status_svn=NULL;
	if (*rev == SVN_INVALID_REVNUM)
	{
		if (url->head_rev == SVN_INVALID_REVNUM)
		{
			BUG_ON( !url->session );

			STOPIF_SVNERR( svn_ra_get_latest_revnum,
					(url->session, & url->head_rev, url->pool));

			DEBUGP("HEAD of %s is at %ld", url->url, url->head_rev);
		}

		*rev=url->head_rev;
	}

ex:
	return status;
}


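/** Iterates over \c urllist, setting \c current_url to the next URL that
 * is to be handled (optionally only URLs with changed entries, if
 * \a only_if_count is set), opens its session and returns the revision
 * to use via \a target_rev.
 * Returns \c EOF when no URLs are left; calling it with a \c NULL
 * \a target_rev resets the iteration. */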
int url__iterator2(svn_revnum_t *target_rev, int only_if_count,
		char **missing)
{
	int status;
	static int last_index=-1;
	svn_revnum_t rev;


	status=0;
	if (!target_rev)
	{
		last_index=-1;
		goto ex;
	}

	while (1)
	{
		last_index++;
		if (last_index >= urllist_count)
		{
			DEBUGP("no more URLs.");

			status=EOF;
			goto ex;
		}

		current_url=urllist[last_index];

		if (only_if_count)
		{
			if (!current_url->entry_list_count)
			{
				DEBUGP("No changes for url %s.", current_url->url);
				continue;
			}
			DEBUGP("%d changes for url %s.",
					current_url->entry_list_count, current_url->url);
		}

		if (url__to_be_handled(current_url)) break;
	}

	STOPIF( url__open_session(NULL, missing), NULL);

	if (current_url->current_target_override)
		rev=current_url->current_target_rev;
	else if (opt_target_revisions_given)
		rev=opt_target_revision;
	else
		rev=current_url->target_rev;
	DEBUGP("doing URL %s @ %s", current_url->url,
			hlp__rev_to_string(rev));

	STOPIF( url__canonical_rev(current_url, &rev), NULL);
	*target_rev = rev;

ex:
	return status;
}