00001
00002
00003
00004
00005
00006
00007
00008
00009 #include <stdlib.h>
00010 #include <fcntl.h>
00011 #include <unistd.h>
00012 #include <ctype.h>
00013 #include <sys/select.h>
00014
00015
00016 #include "url.h"
00017 #include "waa.h"
00018 #include "cache.h"
00019 #include "helper.h"
00020 #include "est_ops.h"
00021 #include "checksum.h"
00022 #include "racallback.h"
00023
00024
/** List of URL names/specs given on the commandline (e.g. via "-u");
 * grown on demand by url__store_url_name(), consumed by url__mark_todo(). */
char **url__parm_list=NULL;
/* Allocated capacity of url__parm_list, and number of slots in use. */
int url__parm_list_len=0,
		url__parm_list_used=0;

/** Flag: the URL definitions changed in memory and must be written back
 * to disk by url__output_list(). */
int url__must_write_defs=0;
00237
00238
00245 int url__find_by_name(const char *name, struct url_t **storage)
00246 {
00247 int status;
00248 int i;
00249
00250
00251 if (name && !*name) name=NULL;
00252
00253 status=EADDRNOTAVAIL;
00254 for(i=0; i<urllist_count; i++)
00255 {
00256
00257 if (!urllist[i]->name ?
00258 (!name || !*name) :
00259 (strcmp(urllist[i]->name, name) == 0) )
00260 {
00261 if (storage) *storage=urllist[i];
00262 status=0;
00263 break;
00264 }
00265 }
00266
00267 if (status)
00268 DEBUGP("url with name %s not found!", name);
00269
00270 return status;
00271 }
00272
00273
00280 int url__find_by_url_in_list(char *url,
00281 struct url_t **list, int count,
00282 struct url_t **storage)
00283 {
00284 int status;
00285 int i;
00286
00287
00288 status=EADDRNOTAVAIL;
00289 for(i=0; i<count; i++)
00290 {
00291 if (strcmp(list[i]->url, url) == 0)
00292 {
00293 if (storage) *storage=list[i];
00294 status=0;
00295 break;
00296 }
00297 }
00298
00299 if (status)
00300 DEBUGP("url with url %s not found!", url);
00301
00302 return status;
00303 }
00304
00305
00307 int url__find_by_url(char *url, struct url_t **storage)
00308 {
00309 return url__find_by_url_in_list(url, urllist, urllist_count, storage);
00310 }
00311
00314 int url__find_by_intnum(int intnum, struct url_t **storage)
00315 {
00316 int status;
00317 int i;
00318
00319
00320
00321
00322 status=EADDRNOTAVAIL;
00323 for(i=0; i<urllist_count; i++)
00324 {
00325 if (urllist[i]->internal_number == intnum)
00326 {
00327 if (storage) *storage=urllist[i];
00328 status=0;
00329 break;
00330 }
00331 }
00332
00333 if (status)
00334 DEBUGP("url with intnum %d not found!", intnum);
00335 else
00336 DEBUGP("url with intnum %d is %s", intnum, (*storage)->url);
00337
00338 return status;
00339 }
00340
00341
/* Bits of the have_seen / *def_parms mask of url__parse(): which
 * settings were present in the parsed URL specification. */
#define HAVE_NAME (1)
#define HAVE_PRIO (2)
#define HAVE_URL (4)
#define HAVE_TARGET (8)
#define HAVE_READONLY (16)
00350
/** Parse one URL specification string into *storage.
 *
 * \a input is a comma-separated list of "key:value" settings ending in
 * the URL itself, e.g. "N:base,P:10,T:22,http://host/path".
 * Recognized keys: name/N, target/T, prio/P, readonly/ro; parsing stops
 * once the URL element has been consumed.
 *
 * If \a def_parms is non-NULL the HAVE_* bitmask of settings seen is
 * returned there (and a missing URL is then allowed); otherwise a
 * missing URL is an error. \a input is modified in place (trailing '/'
 * of the URL are cut off).
 * Returns 0 on success, an errno value otherwise. */
int url__parse(char *input, struct url_t *storage, int *def_parms)
{
	int status;
	char *cp, *value, *end, *cur;
	struct url_t eurl;
	int nlen, vlen, have_seen;

	status=0;

	have_seen=0;
	memset(&eurl, 0, sizeof(eurl));

	/* Defaults for fields whose "unset" representation is non-zero. */
	eurl.internal_number=INVALID_INTERNAL_NUMBER;
	eurl.current_rev=0;
	eurl.target_rev=SVN_INVALID_REVNUM;
	eurl.current_target_override=0;
	eurl.head_rev=SVN_INVALID_REVNUM;
	cur=input;

	DEBUGP("input: %s", input);
	while (! (have_seen & HAVE_URL))
	{
		/* Scan the current element: 'value' points just behind the first
		 * ':' (if any), 'end' at the terminating ',' or NUL. */
		end=cur;
		value=NULL;
		while (*end)
		{
			if (*end == ':' && !value)
				value = end+1;

			if (*end == ',') break;

			end++;
		}

		/* Length of the key part, and of the value part (0 if no ':'). */
		nlen = (value ? value-1 : end) - cur;
		vlen = value ? end - value : 0;

		DEBUGP("cur=%s value=%s end=%s vlen=%d nlen=%d",
				cur, value, end, vlen, nlen);

		if (strncmp("name", cur, nlen) == 0 ||
				strncmp("N", cur, nlen) == 0)
		{
			STOPIF_CODE_ERR( have_seen & HAVE_NAME, EINVAL,
					"!Found two names in URL '%s'; only one may be given.",
					input);
			if (!value) goto need_value;

			if (vlen==0)
				DEBUGP("NULL name");
			else if (storage)
			{
				/* Only allocate the name when the caller keeps the result. */
				STOPIF( hlp__strnalloc(vlen, &eurl.name, value), NULL);

				DEBUGP("got a name '%s' (%d bytes), going on with '%s'",
						eurl.name, vlen, end);
				have_seen |= HAVE_NAME;
			}
		}
		else if (strncmp("target", cur, nlen) == 0 ||
				strncmp("T", cur, nlen) == 0)
		{
			STOPIF_CODE_ERR( have_seen & HAVE_TARGET, EINVAL,
					"!Already got a target revision in URL '%s'.",
					input);
			if (!value) goto need_value;
			STOPIF( hlp__parse_rev( value, &cp, & eurl.target_rev), NULL);
			/* The revision must use up the whole value. */
			STOPIF_CODE_ERR( cp == value || cp != end, EINVAL,
					"The given target revision in '%s' is invalid.",
					input);
			DEBUGP("got target %s", hlp__rev_to_string(eurl.target_rev));
			have_seen |= HAVE_TARGET;
		}
		else if (strncmp("prio", cur, nlen) == 0 ||
				strncmp("P", cur, nlen) == 0)
		{
			STOPIF_CODE_ERR( have_seen & HAVE_PRIO, EINVAL,
					"!Found two priorities in URL '%s'; only one allowed.",
					input);
			if (!value) goto need_value;
			eurl.priority=strtol(value, &cp, 0);
			/* The number must use up the whole value. */
			STOPIF_CODE_ERR( cp == value || cp != end, EINVAL,
					"!The given url \"%s\" is invalid; cannot parse the priority.",
					input);
			DEBUGP("got priority %d", eurl.priority);
			have_seen |= HAVE_PRIO;
		}
		else if (strncmp("readonly", cur, nlen) == 0 ||
				strncmp("ro", cur, nlen) == 0)
		{
			STOPIF_CODE_ERR( have_seen & HAVE_READONLY, EINVAL,
					"!Found two readonly flags in URL \"%s\"; only one allowed.",
					input);
			if (value)
			{
				/* "ro:1" etc.; an explicit value must parse completely. */
				eurl.is_readonly=strtol(value, &cp, 0);
				STOPIF_CODE_ERR( cp == value || cp != end, EINVAL,
						"!Cannot parse the readonly flag in \"%s\".", input);
			}
			else
				/* A bare "ro"/"readonly" means readonly. */
				eurl.is_readonly=1;

			have_seen |= HAVE_READONLY;
		}
		else
		{
			/* No known setting key, so this must be the URL itself: the
			 * "key" is the protocol, the "value" the //host/path part.
			 * Count the ':' as part of the protocol for the compares. */
			nlen++;
			if (strncmp("svn+", cur, 4) == 0)
			{
				/* Tunnel syntax like "svn+ssh:"; something must follow
				 * the "svn+". */
				STOPIF_CODE_ERR(nlen <= 5, EINVAL,
						"!No tunnel given after \"svn+\" in \"%s\".", cur);
			}
			else if (
					(nlen == 4 && strncmp("svn:", cur, nlen) == 0) ||
					(nlen == 5 &&
					 (strncmp("http:", cur, nlen) == 0 ||
						strncmp("file:", cur, nlen) == 0) ) ||
					(nlen == 6 && strncmp("https:", cur, nlen) == 0))
				DEBUGP("known protocol found");
			else
				STOPIF_CODE_ERR(1, EINVAL,
						"!The protocol given in \"%s\" is unknown!", cur);

			/* Minimal sanity check: "proto://..." is required. */
			if (!value || vlen<3 || strncmp(value, "//", 2)!=0)
				STOPIF_CODE_ERR(1, EINVAL, "!The URL in \"%s\" is invalid.", cur);

			/* Strip trailing '/' in place, but keep at least "//x". */
			while (vlen>3 && value[vlen-1] == '/')
				value[--vlen] = 0;

			/* Total length = nlen (protocol incl. ':') + vlen; the
			 * +1/-1 cancel out. */
			eurl.urllen=nlen + 0 + 1 + vlen - 1;
			STOPIF( hlp__strdup( &eurl.url, cur), NULL);

			have_seen |= HAVE_URL;
		}

		/* Skip delimiter(s) and continue with the next element. */
		while (*end == ',') end++;
		if (!*end) break;
		cur=end;
	}

	if (def_parms)
		*def_parms=have_seen;
	else
		STOPIF_CODE_ERR( !(have_seen & HAVE_URL), EINVAL,
				"!No URL found in %s", input);

	if (storage) *storage=eurl;

	/* Something was parsed, so the definitions may need rewriting. */
	url__must_write_defs=1;

ex:
	return status;

need_value:
	STOPIF(EINVAL,
			"!Specification '%s' is not a valid URL - ':' missing.", input);
	goto ex;
}
00567
00568
/** Parse \a eurl and insert it into the global URL list, or merge its
 * settings into an existing entry.
 *
 * Matching is done by URL if one was given, else by name. For a new
 * entry a free slot must already have been reserved via url__allocate().
 * \a storage (if non-NULL) receives the final entry; \a existed (if
 * non-NULL) is set to EEXIST when an existing entry was updated, else 0.
 * Returns 0 on success, an errno value otherwise. */
int url__insert_or_replace(char *eurl,
		struct url_t **storage,
		int *existed)
{
	int status;
	int seen;
	struct url_t target, *dupl, *dest, *by_name;

	status=0;
	STOPIF( url__parse(eurl, &target, &seen), NULL);

	/* Look for an entry with the same name, if one was given. */
	by_name=NULL;
	if (seen & HAVE_NAME)
		url__find_by_name(target.name, &by_name);

	dupl=NULL;
	if (seen & HAVE_URL)
		url__find_by_url(target.url, &dupl);
	else
	{
		/* Without an URL the name is the only handle to the entry. */
		dupl=by_name;
	}

	if (!dupl)
	{
		if (!(seen & HAVE_URL))
		{
			/* Neither an existing entry found nor an URL given. */
			STOPIF( EINVAL,
					!(seen & HAVE_NAME) ?
					"!No URL was given in \"%s\"." :
					"!Cannot find the name given in \"%s\", so cannot modify an URL.",
					eurl);
		}
		if (seen & HAVE_NAME)
		{
			/* A new URL must not reuse an existing name. */
			STOPIF_CODE_ERR( by_name, EADDRINUSE,
					"!There's already an url named \"%s\"", target.name);

			status=0;
		}

		/* Take the next reserved slot; see url__allocate(). */
		dest=urllist[urllist_count];
		*dest = target;
		urllist_count++;
	}
	else
	{
		/* Merge only the settings that were actually given, so a partial
		 * specification does not clobber existing values. */
		if (seen & HAVE_TARGET)
			dupl->target_rev = target.target_rev;
		if (seen & HAVE_PRIO)
			dupl->priority = target.priority;
		if (seen & HAVE_READONLY)
			dupl->is_readonly = target.is_readonly;
		if (seen & HAVE_NAME)
			dupl->name = target.name;

		dest=dupl;
	}

	if (existed)
		*existed = dupl ? EEXIST : 0;

	if (storage)
		*storage=dest;

ex:
	return status;
}
00664
00665
/** Return the first index >= \a from whose bit is NOT set in \a fd.
 * The caller must ensure a clear bit exists within the set's range. */
int find_next_zero_bit(fd_set *fd, int from)
{
	int idx;

	for (idx = from; FD_ISSET(idx, fd); idx++)
		;
	return idx;
}
00674
00675
/** Assign internal numbers to URLs that do not have one yet, and verify
 * that all existing numbers are unique.
 *
 * An fd_set is (ab)used as a bitmap of numbers already in use; numbers
 * larger than urllist_count might not fit into the bitmap and are
 * checked pairwise instead.
 * Returns 0 on success, an errno value otherwise. */
int url___set_internal_nums(void)
{
	int status;
	int i, j, bit;
	fd_set bitmap;

	/* The bitmap must be able to hold one bit per URL. */
	STOPIF_CODE_ERR( sizeof(bitmap)*8 < urllist_count, EMFILE,
			"Your fd_set is too small for the number of urls.\n"
			"Please contact dev@fsvs.tigris.org for help.");

	status=0;
	FD_ZERO(&bitmap);

	for(i=0; i<urllist_count; i++)
	{
		if (urllist[i]->internal_number > urllist_count)
		{
			/* Number too large for the bitmap; compare directly against
			 * all following entries. */
			for(j=i+1; j<urllist_count; j++)
				STOPIF_CODE_ERR(
						urllist[i]->internal_number == urllist[j]->internal_number,
						EINVAL, "The URLs %s and %s have identical internal numbers!",
						urllist[i]->url, urllist[j]->url);
		}
		else if (urllist[i]->internal_number != INVALID_INTERNAL_NUMBER)
		{
			/* Small number: check and record it in the bitmap. */
			STOPIF_CODE_ERR( FD_ISSET(urllist[i]->internal_number, &bitmap),
					EINVAL,
					"The URL %s has a duplicate internal number!",
					urllist[i]->url);

			FD_SET(urllist[i]->internal_number, &bitmap);
		}
	}

	/* Give each unnumbered URL the next free number; start at 1. */
	bit=1;
	for(i=0; i<urllist_count; i++)
	{
		DEBUGP("inum for %s is %d",
				urllist[i]->url, urllist[i]->internal_number);
		if (urllist[i]->internal_number == INVALID_INTERNAL_NUMBER)
		{
			bit= find_next_zero_bit(&bitmap, bit);
			DEBUGP("found a free bit for %s: %d",
					urllist[i]->url, bit);

			urllist[i]->internal_number=bit;

			/* No need to set the bit; simply continue behind it, as we
			 * only move forward through the bitmap. */
			bit++;
		}
	}

ex:
	return status;
}
00750
00751
00753 int url__allocate(int reserve_space)
00754 {
00755 int status;
00756 struct url_t *url_mem;
00757 int i;
00758
00759
00760 status=0;
00761
00762 STOPIF( hlp__realloc( &urllist,
00763 sizeof(*urllist) * (urllist_count+1+reserve_space)), NULL);
00764 STOPIF( hlp__calloc( &url_mem, sizeof(*url_mem), reserve_space), NULL);
00765
00766
00767 for(i=0; i<reserve_space; i++)
00768 {
00769 urllist[urllist_count+i]=url_mem+i;
00770 }
00771 urllist[urllist_count+i]=NULL;
00772
00773 ex:
00774 return status;
00775 }
00776
00777
/** qsort() comparison callback for arrays of struct url_t *; delegates
 * the actual ordering to url__sorter(). */
int url__indir_sorter(const void *a, const void *b)
{
	struct url_t *const *pa=a;
	struct url_t *const *pb=b;

	return url__sorter(*pa, *pb);
}
00786
00787
/** Load the URL list for working copy \a dir from the WAA.
 *
 * \a reserve_space extra slots are reserved for later insertions.
 * The URL definition file is read first; then, if present, the separate
 * revisions file overrides the per-URL current revision.
 * Returns 0, ENOENT when no URL list exists yet (an empty list is then
 * allocated), or another errno value on error. */
int url__load_list(char *dir, int reserve_space)
{
	int status, fh, l, i;
	struct stat64 st;
	char *urllist_mem;
	int inum, cnt, new_count;
	svn_revnum_t rev;
	int intnum;
	struct url_t *target;
	FILE *rev_in;
	char *buffer;

	fh=-1;
	urllist_mem=NULL;

	status=waa__open_byext(dir, WAA__URLLIST_EXT, WAA__READ, &fh);
	if (status==ENOENT)
	{
		/* No list on disk; just reserve space and report ENOENT. */
		STOPIF( url__allocate(reserve_space), NULL);
		status=ENOENT;
		goto ex;
	}

	STOPIF_CODE_ERR(status, status, "Cannot read URL list");

	STOPIF_CODE_ERR( fstat64(fh, &st) == -1, errno,
			"fstat() of url-list");

	/* Slurp the whole file; +1 for a terminating NUL. */
	STOPIF( hlp__alloc( &urllist_mem, st.st_size+1), NULL);

	status=read(fh, urllist_mem, st.st_size);
	STOPIF_CODE_ERR( status != st.st_size, errno,
			"error reading url-list");

	urllist_mem[st.st_size]=0;

	/* First pass: count the NUL-separated, non-empty records. */
	new_count=0;
	for(l=0; l<st.st_size; )
	{
		while (isspace(urllist_mem[l])) l++;

		if (urllist_mem[l]) new_count++;
		l += strlen(urllist_mem+l)+1;
	}

	DEBUGP("found %d urls", new_count);
	STOPIF( url__allocate(reserve_space+new_count), NULL);

	/* Second pass: parse each record.
	 * Format: "<internal number> <revision> <url specification>". */
	for(l=i=0; i<new_count; )
	{
		while (isspace(urllist_mem[l])) l++;

		DEBUGP("url %d of %d: %s",i, new_count, urllist_mem+l);
		if (urllist_mem[l])
		{
			/* %n in 'cnt' gives the offset of the URL specification. */
			STOPIF_CODE_ERR(
					sscanf(urllist_mem+l, "%d %ld %n",
						&inum, &rev, &cnt) != 2,
					EINVAL,
					"Cannot parse urllist line '%s'", urllist_mem+l);

			STOPIF( url__insert_or_replace(urllist_mem+l+cnt, &target, NULL), NULL);
			target->internal_number=inum;
			target->current_rev=rev;

			i++;
			l += strlen(urllist_mem+l);
		}

		/* Step over the record's NUL terminator. */
		l++;
	}

	STOPIF_CODE_ERR( close(fh) == -1, errno, "closing the url-list");
	fh=-1;

	/* The current revisions live in a separate file, so the definitions
	 * file only changes when the definitions do. Older working copies
	 * may not have the revisions file yet. */
	status=waa__open_byext(dir, WAA__URL_REVS, WAA__READ, &fh);
	if (status==ENOENT)
	{
		DEBUGP("No file; upgrading?");
		status=0;
	}
	else
	{
		/* NOTE: after fdopen() the fd is owned by rev_in; fh is reset
		 * below once the stream is closed. */
		rev_in=fdopen(fh, "r");
		while (1)
		{
			status=hlp__string_from_filep(rev_in, &buffer, NULL, 0);
			if (status == EOF)
			{
				status=0;
				break;
			}
			STOPIF( status, "Failed to read copyfrom source");

			/* "<internal number> <revision> 0 0 0 0"; trailing zeros are
			 * reserved fields. */
			STOPIF_CODE_ERR( sscanf(buffer, "%d %lu 0 0 0 0\n",
						&intnum, &rev) != 2, EINVAL,
					"Error parsing line \"%s\" from %s", buffer, WAA__URL_REVS);

			STOPIF( url__find_by_intnum(intnum, &target),
					"URL number %d read from %s not found",
					intnum, WAA__URL_REVS);

			target->current_rev=rev;
		}
		STOPIF_CODE_ERR( fclose(rev_in)==-1, errno,
				"error closing %s", WAA__URL_REVS);
		fh=-1;
	}

	/* Keep the list in sorted order (see url__indir_sorter). */
	qsort(urllist, urllist_count, sizeof(*urllist), url__indir_sorter);

	/* What was just loaded matches the on-disk state. */
	url__must_write_defs=0;

ex:
	if (fh!=-1)
	{
		l=close(fh);
		/* Don't overwrite an earlier error with the close() result. */
		STOPIF_CODE_ERR(l == -1 && !status, errno, "closing the url-list");
	}

	return status;
}
00945
00946
/** Like url__load_list(), but an empty or missing URL list is an error.
 *
 * A NULL \a dir means the working copy path (wc_path).
 * Returns 0 on success, ENOENT when no URLs are defined, or another
 * errno value on error. */
int url__load_nonempty_list(char *dir, int reserve_space)
{
	int status, load_st;

	status=0;
	if (!dir) dir=wc_path;

	load_st=url__load_list(dir, reserve_space);
	/* Either no list at all, or a list without any entries. */
	STOPIF_CODE_ERR( load_st==ENOENT ||
			urllist_count==0, ENOENT,
			"!No URLs have been defined for %s.", dir);

ex:
	return status;
}
00965
00966
/** Write the URL definitions and the per-URL revisions back to the WAA.
 *
 * The definitions file is only rewritten when url__must_write_defs is
 * set; the revisions file is always written.
 * Returns 0 on success, an errno value otherwise. */
int url__output_list(void)
{
	int status, i, fh, l, fh_revs;
	char buffer[1024];
	struct url_t *url;

	fh=-1;
	fh_revs=-1;

	STOPIF( url___set_internal_nums(),
			"Setting the internal numbers failed.");

	if (url__must_write_defs)
		STOPIF( waa__open_byext(NULL, WAA__URLLIST_EXT, WAA__WRITE, &fh), NULL);

	STOPIF( waa__open_byext(NULL, WAA__URL_REVS, WAA__WRITE, &fh_revs), NULL);
	for(i=0; i<urllist_count; i++)
	{
		url=urllist[i];

		/* Skip entries with neither a target nor a current revision.
		 * NOTE(review): presumably these are never-used entries — confirm. */
		if (url->target_rev == 0 && url->current_rev == 0)
			continue;

		if (fh != -1)
		{
			/* Definition record; the second field ("0") is a placeholder
			 * in the on-disk format. */
			l=snprintf(buffer, sizeof(buffer),
					"%d %d T:%ld,N:%s,P:%d,ro:%u,%s",
					url->internal_number,
					0,
					url->target_rev,
					url->name ? url->name : "",
					url->priority,
					url->is_readonly,
					url->url);

			STOPIF_CODE_ERR( l > sizeof(buffer)-4, E2BIG,
					"You've got too long URLs; I'd need %d bytes. Sorry.", l);

			/* Include the NUL byte as the record terminator. */
			l++;
			STOPIF_CODE_ERR( write(fh, buffer, l) != l, errno,
					"Error writing the URL list");
			/* A newline after the NUL keeps the file readable for humans. */
			STOPIF_CODE_ERR( write(fh, "\n", 1) != 1, errno,
					"Error writing the URL list delimiter");
			DEBUGP("writing line %s", buffer);
		}

		/* Revision record; the trailing zeros are reserved fields. */
		l=snprintf(buffer, sizeof(buffer),
				"%d %ld 0 0 0 0\n",
				url->internal_number,
				url->current_rev);

		BUG_ON( l > sizeof(buffer)-4);
		STOPIF_CODE_ERR( write(fh_revs, buffer, l) != l, errno,
				"Error writing the URL list");
	}

	url__must_write_defs=0;

ex:
	/* Close both files even on the error path. */
	if (fh != -1)
	{
		i=waa__close(fh, status);
		fh=-1;
		STOPIF(i, "Error closing the URL list");
	}

	if (fh_revs != -1)
	{
		i=waa__close(fh_revs, status);
		fh_revs=-1;
		STOPIF(i, "Error closing the revisions list");
	}

	return status;
}
01055
01056
/** Open (or reuse) the RA session for current_url.
 *
 * If \a missing_dirs is non-NULL the session is reparented upwards
 * until an existing directory is found; the not-yet-existing path
 * components are returned in *missing_dirs (heap-allocated; NULL when
 * the full URL exists). If \a session is non-NULL it receives the
 * session handle.
 * Returns 0 on success, an errno value otherwise. */
int url__open_session(svn_ra_session_t **session, char **missing_dirs)
{
	int status;
	svn_error_t *status_svn;
	apr_hash_t *cfg;
	char *buffer, *cp;
	int exists;
	svn_revnum_t head;

	status=0;
	/* Each URL gets its own pool, so sessions can be closed
	 * independently of each other. */
	if (!current_url->pool)
	{
		STOPIF( apr_pool_create_ex(& current_url->pool, global_pool,
					NULL, NULL),
				"no pool");
	}

	STOPIF( hlp__get_svn_config(&cfg), NULL);

	/* Already open?  Nothing more to do. */
	if (current_url->session) goto ex;

	/* Work on a writable copy of the URL, as path components may get
	 * cut off below. */
	STOPIF( hlp__strnalloc(current_url->urllen,
				&buffer, current_url->url), NULL);
	cp=buffer+current_url->urllen;
	BUG_ON(*cp);

	STOPIF_SVNERR_TEXT( svn_ra_open,
			(& current_url->session, buffer,
			 &cb__cb_table, NULL,
			 cfg,
			 current_url->pool),
			"svn_ra_open(\"%s\")", current_url->url);
	head=SVN_INVALID_REVNUM;
	STOPIF( url__canonical_rev( current_url, &head), NULL);

	DEBUGP("Trying url %s@%ld", buffer, head);
	while (1)
	{
		/* Without a missing_dirs request the session is used as-is. */
		if (!missing_dirs) break;

		/* Does the currently tried path exist at 'head'? */
		STOPIF( cb__does_path_exist(current_url->session, "", head,
					&exists, current_url->pool), NULL);
		if (exists) break;

		/* Cut off the last path component and retry one level up;
		 * buffer+4 keeps at least the "X://" part intact. */
		while (cp > buffer+4 && *cp != '/') cp--;

		/* A '/' just before cp means we ran out of components. */
		STOPIF_CODE_EPIPE(cp[-1] == '/', EINVAL,
				"!Unsuccessfull svn_ra_stat() on every try for URL \"%s\".",
				current_url->url);

		*cp=0;

		DEBUGP("Reparent to %s", buffer);
		STOPIF_SVNERR( svn_ra_reparent,
				(current_url->session, buffer, current_url->pool));
	}

	if (missing_dirs)
	{
		if (buffer + current_url->urllen == cp)
		{
			/* Nothing was cut off — the whole URL exists. */
			*missing_dirs=NULL;
			IF_FREE(buffer);
		}
		else
		{
			/* Return the relative path of the missing components.
			 * 'buffer' is reused: copy the tail of the original URL
			 * (skipping the '/' behind the existing part) to its start. */
			strcpy(buffer, current_url->url + 1 + (cp - buffer));

			DEBUGP("returning missing=%s", buffer);
			*missing_dirs=buffer;
		}
	}
	else IF_FREE(buffer);

	if (session)
		*session = current_url->session;

ex:
	return status;
}
01181
01182
01185 int url__close_session(struct url_t *cur)
01186 {
01187
01188
01189 if (cur->pool)
01190 {
01191 DEBUGP("closing session and pool for %s", cur->url);
01192
01193 BUG_ON(cur->pool == NULL && cur->session != NULL);
01194 apr_pool_destroy(cur->pool);
01195 cur->session=NULL;
01196 cur->pool=NULL;
01197 }
01198
01199 return 0;
01200 }
01201
01202
01205 int url__close_sessions(void)
01206 {
01207 int status;
01208 int i;
01209
01210 status=0;
01211
01212 IF_FREE(url__parm_list);
01213 url__parm_list_len=url__parm_list_used=0;
01214
01215 for(i=0; i<urllist_count; i++)
01216 STOPIF( url__close_session( urllist[i] ), NULL);
01217
01218 ex:
01219 return status;
01220 }
01221
01222
01227 int url__current_has_precedence(struct url_t *to_compare)
01228 {
01229 return to_compare==NULL ||
01230 (current_url->priority <= to_compare->priority);
01231 }
01232
01233
/** Print the URL list to stdout according to \a format.
 *
 * The format string understands backslash escapes (\\n \\r \\t \\f
 * \\xHH and \\\\) and these placeholders:
 *   %n name, %t target revision, %r current revision, %R readonly flag,
 *   %I internal number, %p priority, %u URL, %% literal percent.
 * A NULL \a format selects a default depending on verbosity.
 * Returns 0 on success, an errno value otherwise. */
int url___dump(char *format)
{
	int status;
	int i;
	char *cp;
	FILE *output=stdout;
	struct url_t *url;

	if (!format)
		format= opt__is_verbose()>0 ?
			"%u\\n\tname: \"%n\"; priority: %p; current revision: %r; "
			"target: %t; readonly:%R\\n" :
			"name:%n,prio:%p,target:%t,ro:%R,%u\\n";

	status=0;
	for(i=0; i < urllist_count; i++)
	{
		url = urllist[i];
		cp=format;

		/* Interpret the format string once per URL. */
		while (*cp)
		{
			switch (cp[0])
			{
				case '\\':
					switch (cp[1])
					{
						case '\\':
							STOPIF_CODE_EPIPE( fputc('\\', output), NULL);
							break;
						case 'n':
							STOPIF_CODE_EPIPE( fputc('\n', output), NULL);
							break;
						case 'r':
							STOPIF_CODE_EPIPE( fputc('\r', output), NULL);
							break;
						case 't':
							STOPIF_CODE_EPIPE( fputc('\t', output), NULL);
							break;
						case 'f':
							STOPIF_CODE_EPIPE( fputc('\f', output), NULL);
							break;
						case 'x':
							/* \xHH needs exactly two hex digits. */
							status= cp[2] && cp[3] ? cs__two_ch2bin(cp+2) : -1;
							STOPIF_CODE_ERR(status <0, EINVAL,
									"A \"\\x\" sequence must have 2 hex digits.");
							STOPIF_CODE_EPIPE( fputc(status, output), NULL);
							/* Skip the two hex digits here; the common
							 * "cp+=2" below skips the "\x" itself. */
							cp+=2;
							break;
						default:
							STOPIF_CODE_ERR(1, EINVAL,
									"Unknown escape sequence '\\%c' in format.",
									cp[1]);
							break;
					}
					cp+=2;
					break;

				case '%':
					switch (cp[1])
					{
						/* An unset name prints as the empty string. */
						case 'n':
							STOPIF_CODE_EPIPE( fputs(url->name ?: "", output), NULL);
							break;
						case 't':
							STOPIF_CODE_EPIPE( fputs(
										hlp__rev_to_string(url->target_rev),
										output), NULL);
							break;
						case 'r':
							STOPIF_CODE_EPIPE( fputs(
										hlp__rev_to_string(url->current_rev),
										output), NULL);
							break;
						case 'R':
							STOPIF_CODE_EPIPE( fprintf(output, "%u",
										url->is_readonly), NULL);
							break;
						case 'I':
							STOPIF_CODE_EPIPE( fprintf(output, "%u",
										url->internal_number), NULL);
							break;
						case 'p':
							STOPIF_CODE_EPIPE( fprintf(output, "%u",
										url->priority), NULL);
							break;
						case 'u':
							STOPIF_CODE_EPIPE( fputs(url->url, output), NULL);
							break;
						case '%':
							STOPIF_CODE_EPIPE( fputc('%', output), NULL);
							break;
						default:
							STOPIF_CODE_ERR(1, EINVAL,
									"Invalid placeholder '%%%c' in format.",
									cp[1]);
							break;
					}
					cp+=2;
					break;

				default:
					/* Plain characters are copied through. */
					STOPIF_CODE_EPIPE( fputc(*cp, output), NULL);
					cp++;
			}
		}
	}

	status=0;

ex:
	return status;
}
01351
01352
/** Build the full repository URL of entry \a sts below \a url.
 *
 * The result lives in a small rotating cache — it stays valid only
 * until a few further calls; callers must not free it. A NULL \a url
 * yields the static string "(none)".
 * Returns 0 on success, an errno value otherwise. */
int url__other_full_url(struct estat *sts, struct url_t *url, char **output)
{
	static const char none[]="(none)";
	static struct cache_t *cache=NULL;
	int status, len;
	char *data, *path;

	status=0;

	if (url)
	{
		STOPIF( ops__build_path( &path, sts), NULL);
		/* URL + '/' + path + NUL. */
		len=url->urllen + 1 + sts->path_len+1;
		STOPIF( cch__new_cache(&cache, 4), NULL);

		STOPIF( cch__add(cache, 0, NULL, len, &data), NULL);
		strcpy( data, url->url);

		if (path[0]=='.' && path[1]==0)
		{
			/* The working copy root itself — the URL alone suffices. */
		}
		else
		{
			/* Strip a leading "./" from the relative path. */
			if (path[0]=='.' && path[1]==PATH_SEPARATOR) path += 2;

			data[url->urllen]='/';
			strcpy( data+url->urllen+1, path);
		}

		*output=data;
	}
	else
		*output=(char*)none;

ex:
	return status;
}
01394
01395
/** Return the full repository URL of \a sts below its own URL.
 * See url__other_full_url() for the lifetime of the returned string. */
int url__full_url(struct estat *sts, char **url)
{
	int status;

	STOPIF( url__other_full_url(sts, sts->url, url), NULL);

ex:
	return status;
}
01406
01407
01408
01410 int url__find(char *url, struct url_t **output)
01411 {
01412 int i;
01413 struct url_t *cur;
01414
01415
01416
01417 for(i=0; i<urllist_count; i++)
01418 {
01419 cur=urllist[i];
01420 if (strncmp(cur->url, url, cur->urllen) == 0)
01421 {
01422 *output = cur;
01423 return 0;
01424 }
01425 }
01426
01427 return ENOENT;
01428 }
01429
01430
/** Implementation of the "urls" action.
 *
 * With no arguments (or "dump") the current URL list is printed; with
 * "load" a fresh list is read from stdin, keeping the internal numbers
 * of URLs that existed before; otherwise each argument is parsed as an
 * URL specification and inserted/merged. The resulting list is written
 * back to the WAA.
 * Returns 0 on success, an errno value otherwise. */
int url__work(struct estat *root UNUSED, int argc, char *argv[])
{
	int status, fh, l, i, had_it;
	char *dir;
	char *cp;
	int have_space;
	struct url_t *target;
	struct url_t *tmp;
	struct url_t **old_urllist;
	int old_urllist_count;

	dir=NULL;
	fh=-1;

	STOPIF( waa__given_or_current_wd(NULL, &dir), NULL );

	STOPIF( waa__set_working_copy(dir), NULL);

	if (argc>0 && strcmp(argv[0], parm_load) == 0)
	{
		/* Load the old list first, so that URLs given again can keep
		 * their internal numbers. */
		status=url__load_list(NULL, argc+1);
		if (!status || status == ENOENT)
		{
			/* Either loaded, or nothing there yet — both are fine. */
		}
		else
		{
			/* Reading failed; warn but continue with an empty mapping. */
			STOPIF_CODE_ERR_GOTO( 1, status, ignore_err,
					"!Got an error reading the old URL list, so the internal URL mappings\n"
					"cannot be kept; a \"sync-repos\" might be necessary.");
ignore_err:
			;
		}

		/* Move the old list aside and start a fresh, empty one. */
		old_urllist_count=urllist_count;
		old_urllist=urllist;
		urllist=NULL;
		urllist_count=0;

		/* A "load" always rewrites the definitions. */
		url__must_write_defs=1;

		status=0;

		/* Read URL specifications from stdin; 'i' counts the new
		 * entries, slots are reserved in batches. */
		i=0;
		have_space=0;
		while (1)
		{
			if (have_space < 1)
			{
				have_space=32;
				STOPIF( url__allocate(have_space), NULL);
			}

			status=hlp__string_from_filep(stdin, &cp, NULL, SFF_WHITESPACE);
			if (status == EOF) break;

			DEBUGP("parsing %s into %d", cp, urllist_count);
			STOPIF( url__insert_or_replace(cp, &target, &had_it), NULL);
			DEBUGP("had=%d", had_it);
			if (!had_it)
			{
				/* A genuinely new entry used up one reserved slot. */
				have_space--;
				i++;
			}
			target->current_rev=0;

			/* If this URL existed in the old list, keep its internal
			 * number so the WAA data stays valid. */
			if (url__find_by_url_in_list(target->url,
						old_urllist, old_urllist_count, &tmp) == 0)
				target->internal_number = tmp->internal_number;
		}

		IF_FREE(old_urllist);

		if (opt__is_verbose() >= 0)
			printf("%d URL%s loaded.\n", i, i==1 ? "" : "s");
	}
	else
	{
		/* Reserve one slot per commandline argument. */
		status=url__load_list(NULL, argc+1);

		if (status == ENOENT)
			urllist_count=0;
		else
			STOPIF_CODE_ERR( status, status, NULL);

		if (argc == 0 || strcmp(argv[0], parm_dump) == 0)
		{
			STOPIF_CODE_ERR( status==ENOENT, ENOENT,
					"!No URLs defined for \"%s\".", dir);

			/* An optional format string may follow "dump". */
			STOPIF( url___dump(argc ? argv[1] : NULL), NULL);
			goto ex;
		}

		DEBUGP("%d to parse", argc);

		/* Insert/merge each given URL specification. */
		for(l=0; l<argc; l++)
		{
			DEBUGP("parsing %s into %d", argv[l], urllist_count);
			STOPIF( url__insert_or_replace(argv[l], &target, &had_it), NULL);
			if (!had_it)
				target->current_rev=0;
		}
	}

	STOPIF( waa__create_working_copy(dir), NULL);

	/* Persist the (possibly changed) list. */
	STOPIF( url__output_list(), NULL);

ex:
	return status;
}
01568
01569
/** Mark the URLs named via url__store_url_name() as to-be-handled.
 *
 * Each stored parameter may contain several names separated by any of
 * ",; \t\r\n\f"; a name may carry a revision override as "name@rev".
 * The parameter strings are modified in place (strtok(), '@' cut).
 * Note: strtok() keeps static state, so this must not run concurrently
 * with other strtok() users.
 * Returns 0 on success, an errno value otherwise. */
int url__mark_todo(void)
{
	int status;
	char *parm, *url_string, *rev_str, **list;
	static const char delim[]=",; \t\r\n\f";
	struct url_t *url;

	status=0;
	if (!url__parm_list_used) goto ex;

	/* NULL-terminate the list; url__store_url_name() keeps a slot free. */
	url__parm_list[url__parm_list_used] = NULL;
	list=url__parm_list;
	while (*list)
	{
		parm=*(list++);

		url_string=strtok(parm, delim);
		while (url_string && *url_string)
		{
			DEBUGP("marking URL %s", url_string);

			/* "name@rev": split off the revision part. */
			rev_str=strchr(url_string, '@');
			if (rev_str) *(rev_str++)=0;

			STOPIF( url__find_by_name(url_string, &url),
					"!No URL with name \"%s\" found", url_string);

			if (url->to_be_handled)
				DEBUGP("URL %s mentioned multiple times", url->url);
			url->to_be_handled=1;

			if (rev_str)
			{
				/* Remember the per-URL revision override. */
				STOPIF( hlp__parse_rev(rev_str, NULL,
							& url->current_target_rev), NULL);
				url->current_target_override=1;
			}

			url_string=strtok(NULL, delim);
		}
	}

ex:
	return status;
}
01622
01623
01629 int url__store_url_name(char *parm)
01630 {
01631 int status;
01632
01633 status=0;
01634
01635 if (url__parm_list_used+2 >= url__parm_list_len)
01636 {
01637 url__parm_list_len= url__parm_list_len ? url__parm_list_len*2 : 8;
01638 STOPIF( hlp__realloc( &url__parm_list,
01639 url__parm_list_len*sizeof(*url__parm_list)), NULL);
01640 }
01641
01642 url__parm_list[url__parm_list_used++] = parm;
01643
01644 ex:
01645 return status;
01646 }
01647
01648
/** Canonicalize *rev: replace SVN_INVALID_REVNUM with the HEAD revision
 * of \a url, querying the repository once and caching the result in
 * url->head_rev.
 *
 * Requires an open session if HEAD has not been fetched yet.
 * Returns 0 on success, an errno value otherwise. */
int url__canonical_rev( struct url_t *url, svn_revnum_t *rev)
{
	int status;
	svn_error_t *status_svn;

	status=0;
	status_svn=NULL;
	if (*rev == SVN_INVALID_REVNUM)
	{
		if (url->head_rev == SVN_INVALID_REVNUM)
		{
			/* HEAD not known yet; ask the repository and cache it. */
			BUG_ON( !url->session );

			STOPIF_SVNERR( svn_ra_get_latest_revnum,
					(url->session, & url->head_rev, url->pool));

			DEBUGP("HEAD of %s is at %ld", url->url, url->head_rev);
		}

		*rev=url->head_rev;
	}

ex:
	return status;
}
01686
01687
/** Iterate over the URL list: advance to the next URL to be handled,
 * open its session, and return its effective target revision in
 * *target_rev (current_url is set as a side effect).
 *
 * The position is kept in a static index; call with target_rev==NULL
 * to reset the iteration. With \a only_if_count set, URLs without
 * changed entries are skipped. \a missing is passed through to
 * url__open_session().
 * Returns 0, EOF when the list is exhausted, or an errno value. */
int url__iterator2(svn_revnum_t *target_rev, int only_if_count,
		char **missing)
{
	int status;
	static int last_index=-1;
	svn_revnum_t rev;

	status=0;
	if (!target_rev)
	{
		/* Reset request: rewind the iterator. */
		last_index=-1;
		goto ex;
	}

	while (1)
	{
		last_index++;
		if (last_index >= urllist_count)
		{
			DEBUGP("no more URLs.");

			status=EOF;
			goto ex;
		}

		current_url=urllist[last_index];

		if (only_if_count)
		{
			/* Skip URLs that have nothing to commit/update. */
			if (!current_url->entry_list_count)
			{
				DEBUGP("No changes for url %s.", current_url->url);
				continue;
			}
			DEBUGP("%d changes for url %s.",
					current_url->entry_list_count, current_url->url);
		}

		if (url__to_be_handled(current_url)) break;
	}

	STOPIF( url__open_session(NULL, missing), NULL);

	/* Revision priority: per-URL commandline override, then a global
	 * -r option, then the stored target revision. */
	if (current_url->current_target_override)
		rev=current_url->current_target_rev;
	else if (opt_target_revisions_given)
		rev=opt_target_revision;
	else
		rev=current_url->target_rev;
	DEBUGP("doing URL %s @ %s", current_url->url,
			hlp__rev_to_string(rev));

	/* Resolve HEAD etc. into a concrete revision number. */
	STOPIF( url__canonical_rev(current_url, &rev), NULL);
	*target_rev = rev;

ex:
	return status;
}
01761