#! /usr/bin/perl

# Copyright (C) 2011
# Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
# Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
# Claire Fousse <claire.fousse@ensimag.imag.fr>
# David Amouyal <david.amouyal@ensimag.imag.fr>
# Matthieu Moy <matthieu.moy@grenoble-inp.fr>
# License: GPL v2 or later

# Gateway between Git and MediaWiki.
# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/

use strict;
use MediaWiki::API;
use Git;
use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe
                      EMPTY HTTP_CODE_OK);
use DateTime::Format::ISO8601;
use warnings;

# By default, use UTF-8 to communicate with Git and the user
binmode STDERR, ':encoding(UTF-8)';
binmode STDOUT, ':encoding(UTF-8)';

use URI::Escape;

# It's not always possible to delete pages (may require some
# privileges). Deleted pages are replaced with this content.
use constant DELETED_CONTENT => "[[Category:Deleted]]\n";

# It's not possible to create empty pages. New empty files in Git are
# sent with this content instead.
use constant EMPTY_CONTENT => "<!-- empty page -->\n";

# Used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => '0000000000000000000000000000000000000000';

# Used on Git's side to reflect empty edit messages on the wiki
use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
# Number of pages taken into account at once in the subroutine get_mw_page_list
use constant SLICE_SIZE => 50;

# Number of linked mediafiles to get at once in get_linked_mediafiles
# The query is split in small batches because of the MW API limit on
# the number of links to be returned (500 links max).
use constant BATCH_SIZE => 10;

if (@ARGV != 2) {
    exit_error_usage();
}

my $remotename = $ARGV[0];
my $url = $ARGV[1];
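
# Git invokes a remote helper with two arguments: the remote name and
# the URL (see gitremote-helpers(7)). For instance, for a remote added
# as "git remote add origin mediawiki::http://wiki.example.com/mw",
# the helper receives ("origin", "http://wiki.example.com/mw") here
# (the URL is illustrative).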

# Accept both space-separated values and multiple keys in the config
# file. Spaces in page names must be written as _ anyway, because the
# values are split on spaces and newlines.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
chomp(@tracked_pages);

# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
chomp(@tracked_categories);

# Just like @tracked_categories, but for MediaWiki namespaces.
my @tracked_namespaces = split(/[ \n]/, run_git("config --get-all remote.${remotename}.namespaces"));
for (@tracked_namespaces) { s/_/ /g; }
chomp(@tracked_namespaces);
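
# For instance, tracked pages, categories and namespaces can be
# configured with (illustrative values):
#   git config --add remote.origin.pages "Main_Page Help:Contents"
#   git config --add remote.origin.categories My_Category
#   git config --add remote.origin.namespaces "(Main) Talk"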

# Import media files on pull
my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
chomp($import_media);
$import_media = ($import_media eq 'true');

# Export media files on push
my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
chomp($export_media);
$export_media = !($export_media eq 'false');
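# Note the asymmetric defaults when these keys are unset (in which case
# "git config" prints nothing): mediaimport defaults to false, while
# mediaexport defaults to true.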

my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);

# Import only the last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
chomp($shallow_import);
$shallow_import = ($shallow_import eq 'true');

# Fetch (clone and pull) by revisions instead of by pages. This behavior
# is more efficient when we have a wiki with lots of pages and we fetch
# the revisions quite often, so that they concern only a few pages.
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revisions
my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
if (!$fetch_strategy) {
    $fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
}
chomp($fetch_strategy);
if (!$fetch_strategy) {
    $fetch_strategy = 'by_page';
}
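
# For instance: git config remote.origin.fetchStrategy by_rev
# (or set mediawiki.fetchStrategy to apply to all MediaWiki remotes;
# "origin" is an illustrative remote name).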

# Remember the timestamp corresponding to a revision id.
my %basetimestamps;

# Dumb push: don't update notes and mediawiki ref to reflect the last push.
#
# Configurable with mediawiki.dumbPush, or per-remote with
# remote.<remotename>.dumbPush.
#
# This means the user will have to re-import the just-pushed
# revisions. On the other hand, this means that the Git revisions
# corresponding to MediaWiki revisions are all imported from the wiki,
# regardless of whether they were initially created in Git or from the
# web interface, hence all users will get the same history (i.e. if
# the push from Git to MediaWiki loses some information, everybody
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
if (!$dumb_push) {
    $dumb_push = run_git('config --get --bool mediawiki.dumbPush');
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq 'true');

my $wiki_name = $url;
$wiki_name =~ s{[^/]*://}{};
# If the URL is like http://user:password@example.com/, we clearly don't
# want the password in $wiki_name. While we're there, also remove the
# user and the '@' sign, to avoid an author like MWUser@HTTPUser@host.com
$wiki_name =~ s/^.*@//;
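# For instance, 'https://alice:secret@wiki.example.com/mw' becomes
# 'wiki.example.com/mw' (illustrative URL).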

# Commands parser
while (<STDIN>) {
    chomp;

    if (!parse_command($_)) {
        last;
    }

    BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
                     # command is fully processed.
}
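
# A typical exchange with Git over stdin/stdout looks like this
# (illustrative; see gitremote-helpers(7) for the full protocol):
#   git> capabilities
#   helper< refspec refs/heads/*:refs/mediawiki/origin/*
#   helper< import
#   helper< list
#   helper< push
#   helper<
#   git> list
#   helper< ? refs/heads/master
#   helper< @refs/heads/master HEAD
#   helper<
#   git> import refs/heads/master
#   (the helper then emits a fast-import stream, terminated by "done")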

########################## Functions ##############################

## error handling
sub exit_error_usage {
    die "ERROR: git-remote-mediawiki module was not called with the correct\n" .
        "number of parameters.\n" .
        "You may have gotten this error because you attempted to run the\n" .
        "git-remote-mediawiki module directly.\n" .
        "This module is used the following way:\n" .
        "\tgit clone mediawiki://<address of a mediawiki>\n" .
        "Then, use git commit, push and pull as with any normal git repository.\n";
}

sub parse_command {
    my ($line) = @_;
    my @cmd = split(/ /, $line);
    if (!defined $cmd[0]) {
        return 0;
    }
    if ($cmd[0] eq 'capabilities') {
        die("Too many arguments for capabilities\n")
            if (defined($cmd[1]));
        mw_capabilities();
    } elsif ($cmd[0] eq 'list') {
        die("Too many arguments for list\n") if (defined($cmd[2]));
        mw_list($cmd[1]);
    } elsif ($cmd[0] eq 'import') {
        die("Invalid argument for import\n")
            if ($cmd[1] eq EMPTY);
        die("Too many arguments for import\n")
            if (defined($cmd[2]));
        mw_import($cmd[1]);
    } elsif ($cmd[0] eq 'option') {
        die("Invalid arguments for option\n")
            if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
        die("Too many arguments for option\n")
            if (defined($cmd[3]));
        mw_option($cmd[1],$cmd[2]);
    } elsif ($cmd[0] eq 'push') {
        mw_push($cmd[1]);
    } else {
        print {*STDERR} "Unknown command. Aborting...\n";
        return 0;
    }
    return 1;
}

# MediaWiki API instance, created lazily.
my $mediawiki;

sub fatal_mw_error {
    my $action = shift;
    print STDERR "fatal: could not $action.\n";
    print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
    if ($url =~ /^https/) {
        print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
        print STDERR "fatal: and the SSL certificate is correct.\n";
    } else {
        print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
    }
    print STDERR "fatal: (error " .
        $mediawiki->{error}->{code} . ': ' .
        $mediawiki->{error}->{details} . ")\n";
    exit 1;
}

## Functions for listing pages on the remote wiki
sub get_mw_tracked_pages {
    my $pages = shift;
    get_mw_page_list(\@tracked_pages, $pages);
    return;
}

sub get_mw_page_list {
    my $page_list = shift;
    my $pages = shift;
    my @some_pages = @{$page_list};
    while (@some_pages) {
        my $last_page = SLICE_SIZE;
        if ($#some_pages < $last_page) {
            $last_page = $#some_pages;
        }
        my @slice = @some_pages[0..$last_page];
        get_mw_first_pages(\@slice, $pages);
        @some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
    }
    return;
}
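# Note that the inclusive 0..$last_page range makes each slice contain
# up to SLICE_SIZE + 1 titles, which is why SLICE_SIZE + 1 entries are
# dropped from @some_pages on each iteration.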

sub get_mw_tracked_categories {
    my $pages = shift;
    foreach my $category (@tracked_categories) {
        if (index($category, ':') < 0) {
            # Mediawiki requires the Category
            # prefix, but let's not force the user
            # to specify it.
            $category = "Category:${category}";
        }
        my $mw_pages = $mediawiki->list( {
            action => 'query',
            list => 'categorymembers',
            cmtitle => $category,
            cmlimit => 'max' } )
            || die $mediawiki->{error}->{code} . ': '
                . $mediawiki->{error}->{details} . "\n";
        foreach my $page (@{$mw_pages}) {
            $pages->{$page->{title}} = $page;
        }
    }
    return;
}

sub get_mw_tracked_namespaces {
    my $pages = shift;
    foreach my $local_namespace (sort @tracked_namespaces) {
        my $namespace_id;
        if ($local_namespace eq "(Main)") {
            $namespace_id = 0;
        } else {
            $namespace_id = get_mw_namespace_id($local_namespace);
        }
        # virtual namespaces don't support allpages
        next if !defined($namespace_id) || $namespace_id < 0;
        my $mw_pages = $mediawiki->list( {
            action => 'query',
            list => 'allpages',
            apnamespace => $namespace_id,
            aplimit => 'max' } )
            || die $mediawiki->{error}->{code} . ': '
                . $mediawiki->{error}->{details} . "\n";
        print {*STDERR} (scalar @{$mw_pages}) . " pages found in namespace $local_namespace ($namespace_id)\n";
        foreach my $page (@{$mw_pages}) {
            $pages->{$page->{title}} = $page;
        }
    }
    return;
}

sub get_mw_all_pages {
    my $pages = shift;
    # No user-provided list, get the list of pages from the API.
    my $mw_pages = $mediawiki->list({
        action => 'query',
        list => 'allpages',
        aplimit => 'max'
    });
    if (!defined($mw_pages)) {
        fatal_mw_error("get the list of wiki pages");
    }
    foreach my $page (@{$mw_pages}) {
        $pages->{$page->{title}} = $page;
    }
    return;
}

# Queries the wiki for a set of pages. Meant to be used within a loop
# querying the wiki for slices of a page list.
sub get_mw_first_pages {
    my $some_pages = shift;
    my @some_pages = @{$some_pages};

    my $pages = shift;

    # pattern 'page1|page2|...' required by the API
    my $titles = join('|', @some_pages);

    my $mw_pages = $mediawiki->api({
        action => 'query',
        titles => $titles,
    });
    if (!defined($mw_pages)) {
        fatal_mw_error("query the list of wiki pages");
    }
    while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
        if ($id < 0) {
            print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
        } else {
            $pages->{$page->{title}} = $page;
        }
    }
    return;
}

# Get the list of pages to be fetched according to configuration.
sub get_mw_pages {
    $mediawiki = connect_maybe($mediawiki, $remotename, $url);

    print {*STDERR} "Listing pages on remote wiki...\n";

    my %pages; # hash on page titles to avoid duplicates
    my $user_defined;
    if (@tracked_pages) {
        $user_defined = 1;
        # The user provided a list of page titles, but we
        # still need to query the API to get the page IDs.
        get_mw_tracked_pages(\%pages);
    }
    if (@tracked_categories) {
        $user_defined = 1;
        get_mw_tracked_categories(\%pages);
    }
    if (@tracked_namespaces) {
        $user_defined = 1;
        get_mw_tracked_namespaces(\%pages);
    }
    if (!$user_defined) {
        get_mw_all_pages(\%pages);
    }
    if ($import_media) {
        print {*STDERR} "Getting media files for selected pages...\n";
        if ($user_defined) {
            get_linked_mediafiles(\%pages);
        } else {
            get_all_mediafiles(\%pages);
        }
    }
    print {*STDERR} (scalar keys %pages) . " pages found.\n";
    return %pages;
}

# usage: $out = run_git("command args");
#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
    my $args = shift;
    my $encoding = (shift || 'encoding(UTF-8)');
    open(my $git, "-|:${encoding}", "git ${args}")
        or die "Unable to fork: $!\n";
    my $res = do {
        local $/ = undef;
        <$git>
    };
    close($git);

    return $res;
}

sub get_all_mediafiles {
    my $pages = shift;
    # Attach a list of all pages for media files from the API;
    # they are in a different namespace, and only one namespace
    # can be queried at a time.
    my $mw_pages = $mediawiki->list({
        action => 'query',
        list => 'allpages',
        apnamespace => get_mw_namespace_id('File'),
        aplimit => 'max'
    });
    if (!defined($mw_pages)) {
        print {*STDERR} "fatal: could not get the list of pages for media files.\n";
        print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
        print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
        exit 1;
    }
    foreach my $page (@{$mw_pages}) {
        $pages->{$page->{title}} = $page;
    }
    return;
}

sub get_linked_mediafiles {
    my $pages = shift;
    my @titles = map { $_->{title} } values(%{$pages});

    my $batch = BATCH_SIZE;
    while (@titles) {
        if ($#titles < $batch) {
            $batch = $#titles;
        }
        my @slice = @titles[0..$batch];

        # pattern 'page1|page2|...' required by the API
        my $mw_titles = join('|', @slice);

        # Media files could be included in or linked from
        # a page; get all of them.
        my $query = {
            action => 'query',
            prop => 'links|images',
            titles => $mw_titles,
            plnamespace => get_mw_namespace_id('File'),
            pllimit => 'max'
        };
        my $result = $mediawiki->api($query);

        while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
            my @media_titles;
            if (defined($page->{links})) {
                my @link_titles
                    = map { $_->{title} } @{$page->{links}};
                push(@media_titles, @link_titles);
            }
            if (defined($page->{images})) {
                my @image_titles
                    = map { $_->{title} } @{$page->{images}};
                push(@media_titles, @image_titles);
            }
            if (@media_titles) {
                get_mw_page_list(\@media_titles, $pages);
            }
        }

        @titles = @titles[($batch+1)..$#titles];
    }
    return;
}

sub get_mw_mediafile_for_page_revision {
    # Name of the file on the wiki, with the prefix.
    my $filename = shift;
    my $timestamp = shift;
    my %mediafile;

    # Check whether a media file with the given timestamp exists on
    # MediaWiki. If so, download the file.
    my $query = {
        action => 'query',
        prop => 'imageinfo',
        titles => "File:${filename}",
        iistart => $timestamp,
        iiend => $timestamp,
        iiprop => 'timestamp|archivename|url',
        iilimit => 1
    };
    my $result = $mediawiki->api($query);

    my ($fileid, $file) = each( %{$result->{query}->{pages}} );
    # If not defined, it means there is no revision of the file for
    # the given timestamp.
    if (defined($file->{imageinfo})) {
        $mediafile{title} = $filename;

        my $fileinfo = pop(@{$file->{imageinfo}});
        $mediafile{timestamp} = $fileinfo->{timestamp};
        # Mediawiki::API's download function doesn't support https URLs
        # and can't download old versions of files.
        print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
        $mediafile{content} = download_mw_mediafile($fileinfo->{url});
    }
    return %mediafile;
}

sub download_mw_mediafile {
    my $download_url = shift;

    my $response = $mediawiki->{ua}->get($download_url);
    if ($response->code == HTTP_CODE_OK) {
        # It is tempting to return
        # $response->decoded_content({charset => "none"}), but
        # when doing so, utf8::downgrade($content) fails with
        # "Wide character in subroutine entry".
        $response->decode();
        return $response->content();
    } else {
        print {*STDERR} "Error downloading mediafile from:\n";
        print {*STDERR} "URL: ${download_url}\n";
        print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
        exit 1;
    }
}

sub get_last_local_revision {
    # Get the note regarding the last mediawiki revision.
    my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
    my @note_info = split(/ /, $note);

    my $lastrevision_number;
    if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
        print {*STDERR} 'No previous mediawiki revision found';
        $lastrevision_number = 0;
    } else {
        # Notes are formatted as: mediawiki_revision: #number
        $lastrevision_number = $note_info[1];
        chomp($lastrevision_number);
        print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
    }
    return $lastrevision_number;
}

# Get the last remote revision without taking into account which pages
# are tracked or not. This function makes a single request to the wiki,
# thus avoiding a loop over all tracked pages. This is useful for the
# fetch-by-rev option.
sub get_last_global_remote_rev {
    $mediawiki = connect_maybe($mediawiki, $remotename, $url);

    my $query = {
        action => 'query',
        list => 'recentchanges',
        prop => 'revisions',
        rclimit => '1',
        rcdir => 'older',
    };
    my $result = $mediawiki->api($query);
    return $result->{query}->{recentchanges}[0]->{revid};
}

# Get the last remote revision concerning the tracked pages and the
# tracked categories.
sub get_last_remote_revision {
    $mediawiki = connect_maybe($mediawiki, $remotename, $url);

    my %pages_hash = get_mw_pages();
    my @pages = values(%pages_hash);

    my $max_rev_num = 0;

    print {*STDERR} "Getting last revision id on tracked pages...\n";

    foreach my $page (@pages) {
        my $id = $page->{pageid};

        my $query = {
            action => 'query',
            prop => 'revisions',
            rvprop => 'ids|timestamp',
            pageids => $id,
        };

        my $result = $mediawiki->api($query);

        my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});

        $basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};

        $max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
    }

    print {*STDERR} "Last remote revision found is $max_rev_num.\n";
    return $max_rev_num;
}

# Clean content before sending it to MediaWiki
sub mediawiki_clean {
    my $string = shift;
    my $page_created = shift;
    # MediaWiki does not allow blank space at the end of a page, and a
    # page ends with a single \n. This function right-trims the string
    # and adds a \n at the end to follow this rule.
    $string =~ s/\s+$//;
    if ($string eq EMPTY && $page_created) {
        # Creating empty pages is forbidden.
        $string = EMPTY_CONTENT;
    }
    return $string."\n";
}

# Filter applied on MediaWiki data before adding them to Git
sub mediawiki_smudge {
    my $string = shift;
    if ($string eq EMPTY_CONTENT) {
        $string = EMPTY;
    }
    # This \n is important. This is due to mediawiki's way of
    # handling the end of files.
    return "${string}\n";
}

sub literal_data {
    my ($content) = @_;
    print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
    return;
}

sub literal_data_raw {
    # Output possibly binary content.
    my ($content) = @_;
    # Avoid confusion between size in bytes and in characters
    utf8::downgrade($content);
    binmode STDOUT, ':raw';
    print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
    binmode STDOUT, ':encoding(UTF-8)';
    return;
}

sub mw_capabilities {
    # Revisions are imported to the private namespace
    # refs/mediawiki/$remotename/ by the helper and fetched into
    # refs/remotes/$remotename later by fetch.
    print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
    print {*STDOUT} "import\n";
    print {*STDOUT} "list\n";
    print {*STDOUT} "push\n";
    if ($dumb_push) {
        print {*STDOUT} "no-private-update\n";
    }
    print {*STDOUT} "\n";
    return;
}

sub mw_list {
    # MediaWiki does not have branches; we consider one branch arbitrarily
    # called master, and HEAD pointing to it.
    print {*STDOUT} "? refs/heads/master\n";
    print {*STDOUT} "\@refs/heads/master HEAD\n";
    print {*STDOUT} "\n";
    return;
}

sub mw_option {
    print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
    print {*STDOUT} "unsupported\n";
    return;
}

sub fetch_mw_revisions_for_page {
    my $page = shift;
    my $id = shift;
    my $fetch_from = shift;
    my @page_revs = ();
    my $query = {
        action => 'query',
        prop => 'revisions',
        rvprop => 'ids',
        rvdir => 'newer',
        rvstartid => $fetch_from,
        rvlimit => 500,
        pageids => $id,

        # Let MediaWiki know that we support the latest API.
        continue => '',
    };

    my $revnum = 0;
    # Get 500 revisions at a time due to the mediawiki api limit
    while (1) {
        my $result = $mediawiki->api($query);

        # Parse each of those 500 revisions
        foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
            my $page_rev_ids;
            $page_rev_ids->{pageid} = $page->{pageid};
            $page_rev_ids->{revid} = $revision->{revid};
            push(@page_revs, $page_rev_ids);
            $revnum++;
        }

        if ($result->{'query-continue'}) { # For legacy APIs
            $query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
        } elsif ($result->{continue}) { # For newer APIs
            $query->{rvstartid} = $result->{continue}->{rvcontinue};
            $query->{continue} = $result->{continue}->{continue};
        } else {
            last;
        }
    }
    if ($shallow_import && @page_revs) {
        print {*STDERR} " Found 1 revision (shallow import).\n";
        @page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
        return $page_revs[0];
    }
    print {*STDERR} " Found ${revnum} revision(s).\n";
    return @page_revs;
}

sub fetch_mw_revisions {
    my $pages = shift; my @pages = @{$pages};
    my $fetch_from = shift;

    my @revisions = ();
    my $n = 1;
    foreach my $page (@pages) {
        my $id = $page->{pageid};
        print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
        $n++;
        my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
        @revisions = (@page_revs, @revisions);
    }

    return ($n, @revisions);
}

sub fe_escape_path {
    my $path = shift;
    $path =~ s/\\/\\\\/g;
    $path =~ s/"/\\"/g;
    $path =~ s/\n/\\n/g;
    return qq("${path}");
}
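# The escaping above produces git fast-import's quoted-path syntax:
# for instance, fe_escape_path(q(My "Page".mw)) returns the string
# "My \"Page\".mw", including the surrounding double quotes.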

sub import_file_revision {
    my $commit = shift;
    my %commit = %{$commit};
    my $full_import = shift;
    my $n = shift;
    my $mediafile = shift;
    my %mediafile;
    if ($mediafile) {
        %mediafile = %{$mediafile};
    }

    my $title = $commit{title};
    my $comment = $commit{comment};
    my $content = $commit{content};
    my $author = $commit{author};
    my $date = $commit{date};

    print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
    print {*STDOUT} "mark :${n}\n";
    print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
    literal_data($comment);

    # If it's not a clone, we need to know where to start from
    if (!$full_import && $n == 1) {
        print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
    }
    if ($content ne DELETED_CONTENT) {
        print {*STDOUT} 'M 644 inline ' .
            fe_escape_path("${title}.mw") . "\n";
        literal_data($content);
        if (%mediafile) {
            print {*STDOUT} 'M 644 inline '
                . fe_escape_path($mediafile{title}) . "\n";
            literal_data_raw($mediafile{content});
        }
        print {*STDOUT} "\n\n";
    } else {
        print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
    }

    # mediawiki revision number in the git note
    if ($full_import && $n == 1) {
        print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
    }
    print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
    print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
    literal_data('Note added by git-mediawiki during import');
    if (!$full_import && $n == 1) {
        print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
    }
    print {*STDOUT} "N inline :${n}\n";
    literal_data("mediawiki_revision: $commit{mw_revision}");
    print {*STDOUT} "\n\n";
    return;
}
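
# For a single imported revision, the fast-import stream emitted above
# looks roughly like this (illustrative values):
#   commit refs/mediawiki/origin/master
#   mark :1
#   committer Alice <Alice@wiki.example.com> 1234567890 +0000
#   data <size of the edit summary in bytes>
#   <edit summary>
#   M 644 inline "Main_Page.mw"
#   data <size of the page content in bytes>
#   <page content>
# followed by a second commit on refs/notes/.../mediawiki attaching the
# "mediawiki_revision:" note to the mark.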

# parse a sequence of
#     <cmd> <arg1>
#     <cmd> <arg2>
#     \n
# (like batch sequences of import and push statements)
sub get_more_refs {
    my $cmd = shift;
    my @refs;
    while (1) {
        my $line = <STDIN>;
        if ($line =~ /^$cmd (.*)$/) {
            push(@refs, $1);
        } elsif ($line eq "\n") {
            return @refs;
        } else {
            chomp($line);
            die("Invalid command in a '$cmd' batch: $line\n");
        }
    }
    return;
}
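
# For instance, after parse_command() has read "import refs/heads/master",
# Git may send:
#   import HEAD
#   <blank line>
# and get_more_refs('import') returns ('HEAD').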

sub mw_import {
    # multiple import commands can follow each other.
    my @refs = (shift, get_more_refs('import'));
    foreach my $ref (@refs) {
        mw_import_ref($ref);
    }
    print {*STDOUT} "done\n";
    return;
}

sub mw_import_ref {
    my $ref = shift;
    # The remote helper will call "import HEAD" and
    # "import refs/heads/master".
    # Since HEAD is a symbolic ref to master (by convention,
    # followed by the output of the command "list" that we gave),
    # we don't need to do anything in this case.
    if ($ref eq 'HEAD') {
        return;
    }

    $mediawiki = connect_maybe($mediawiki, $remotename, $url);

    print {*STDERR} "Searching revisions...\n";
    my $last_local = get_last_local_revision();
    my $fetch_from = $last_local + 1;
    if ($fetch_from == 1) {
        print {*STDERR} ", fetching from beginning.\n";
    } else {
        print {*STDERR} ", fetching from here.\n";
    }

    my $n = 0;
    if ($fetch_strategy eq 'by_rev') {
        print {*STDERR} "Fetching & writing export data by revs...\n";
        $n = mw_import_ref_by_revs($fetch_from);
    } elsif ($fetch_strategy eq 'by_page') {
        print {*STDERR} "Fetching & writing export data by pages...\n";
        $n = mw_import_ref_by_pages($fetch_from);
    } else {
        print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
        print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
        exit 1;
    }

    if ($fetch_from == 1 && $n == 0) {
        print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
        # Something has to be done remote-helper side. If nothing is done, an error is
        # thrown saying that HEAD is referring to unknown object 0000000000000000000
        # and the clone fails.
    }
    return;
}

sub mw_import_ref_by_pages {

    my $fetch_from = shift;
    my %pages_hash = get_mw_pages();
    my @pages = values(%pages_hash);

    my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);

    @revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
    my @revision_ids = map { $_->{revid} } @revisions;

    return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

sub mw_import_ref_by_revs {

    my $fetch_from = shift;
    my %pages_hash = get_mw_pages();

    my $last_remote = get_last_global_remote_rev();
    my @revision_ids = $fetch_from..$last_remote;
    return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
}

# Import revisions given in the second argument (array of integers).
# Only pages appearing in the third argument (hash indexed by page titles)
# will be imported.
sub mw_import_revids {
    my $fetch_from = shift;
    my $revision_ids = shift;
    my $pages = shift;

    my $n = 0;
    my $n_actual = 0;
    my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined

    foreach my $pagerevid (@{$revision_ids}) {
        # Count the page even if we skip it, since we display
        # $n/$total and $total includes skipped pages.
        $n++;

        # fetch the content of the pages
        my $query = {
            action => 'query',
            prop => 'revisions',
            rvprop => 'content|timestamp|comment|user|ids',
            revids => $pagerevid,
        };

        my $result = $mediawiki->api($query);

        if (!$result) {
            die "Failed to retrieve modified page for revision $pagerevid\n";
        }

        if (defined($result->{query}->{badrevids}->{$pagerevid})) {
            # The revision id does not exist on the remote wiki.
            next;
        }

        if (!defined($result->{query}->{pages})) {
            die "Invalid revision ${pagerevid}.\n";
        }

        my @result_pages = values(%{$result->{query}->{pages}});
        my $result_page = $result_pages[0];
        my $rev = $result_pages[0]->{revisions}->[0];

        my $page_title = $result_page->{title};

        if (!exists($pages->{$page_title})) {
            print {*STDERR} "${n}/", scalar(@{$revision_ids}),
                ": Skipping revision #$rev->{revid} of ${page_title}\n";
            next;
        }

        $n_actual++;

        my %commit;
        $commit{author} = $rev->{user} || 'Anonymous';
        $commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
        $commit{title} = smudge_filename($page_title);
        $commit{mw_revision} = $rev->{revid};
        $commit{content} = mediawiki_smudge($rev->{'*'});

        if (!defined($rev->{timestamp})) {
            $last_timestamp++;
        } else {
            $last_timestamp = $rev->{timestamp};
        }
        $commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);

        # Differentiate classic pages and media files.
        my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
        my %mediafile;
        if ($namespace) {
            my $id = get_mw_namespace_id($namespace);
            if ($id && $id == get_mw_namespace_id('File')) {
                %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
            }
        }
        # If this is a revision of the media page for a new version
        # of a file, do one common commit for both the file and the
        # media page. Otherwise, commit only for that page.
        print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
        import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
    }

    return $n_actual;
}

sub error_non_fast_forward {
    my $advice = run_git('config --bool advice.pushNonFastForward');
    chomp($advice);
    if ($advice ne 'false') {
        # Native git-push would show this after the summary.
        # We can't ask it to display it cleanly, so print it
        # ourselves before.
        print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
        print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
        print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
    }
    print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
    return 0;
}

sub mw_upload_file {
    my $complete_file_name = shift;
    my $new_sha1 = shift;
    my $extension = shift;
    my $file_deleted = shift;
    my $summary = shift;
    my $newrevid;
    my $path = "File:${complete_file_name}";
    my %hashFiles = get_allowed_file_extensions();
    if (!exists($hashFiles{$extension})) {
        print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
        print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
        return $newrevid;
    }
    # Deleting and uploading a file requires a privileged user
    if ($file_deleted) {
        $mediawiki = connect_maybe($mediawiki, $remotename, $url);
        my $query = {
            action => 'delete',
            title => $path,
            reason => $summary
        };
        if (!$mediawiki->edit($query)) {
            print {*STDERR} "Failed to delete file on remote wiki\n";
            print {*STDERR} "Check your permissions on the remote site. Error code:\n";
            print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
            exit 1;
        }
    } else {
        # Don't let perl try to interpret file content as UTF-8 => use "raw"
        my $content = run_git("cat-file blob ${new_sha1}", 'raw');
        if ($content ne EMPTY) {
            $mediawiki = connect_maybe($mediawiki, $remotename, $url);
            $mediawiki->{config}->{upload_url} =
                "${url}/index.php/Special:Upload";
            $mediawiki->edit({
                action => 'upload',
                filename => $complete_file_name,
                comment => $summary,
                file => [undef,
                         $complete_file_name,
                         Content => $content],
                ignorewarnings => 1,
            }, {
                skip_encoding => 1
            } ) || die $mediawiki->{error}->{code} . ':'
                . $mediawiki->{error}->{details} . "\n";
            my $last_file_page = $mediawiki->get_page({title => $path});
            $newrevid = $last_file_page->{revid};
            print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
        } else {
            print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
        }
    }
    return $newrevid;
}

sub mw_push_file {
    my $diff_info = shift;
    # $diff_info contains a string in this format:
    # 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
    my @diff_info_split = split(/[ \t]/, $diff_info);

    # Filename, including .mw extension
    my $complete_file_name = shift;
    # Commit message
    my $summary = shift;
    # MediaWiki revision number. Keep the previous one by default,
    # in case there's no edit to perform.
    my $oldrevid = shift;
    my $newrevid;

    if ($summary eq EMPTY_MESSAGE) {
        $summary = EMPTY;
    }

    my $new_sha1 = $diff_info_split[3];
    my $old_sha1 = $diff_info_split[2];
    my $page_created = ($old_sha1 eq NULL_SHA1);
    my $page_deleted = ($new_sha1 eq NULL_SHA1);
    $complete_file_name = clean_filename($complete_file_name);

    my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
    if (!defined($extension)) {
        $extension = EMPTY;
    }
    if ($extension eq 'mw') {
        my $ns = get_mw_namespace_id_for_page($complete_file_name);
        if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
            print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
            return ($oldrevid, 'ok');
        }
        my $file_content;
        if ($page_deleted) {
            # Deleting a page usually requires
            # special privileges. A common
            # convention is to replace the page
            # with this content instead:
            $file_content = DELETED_CONTENT;
        } else {
            $file_content = run_git("cat-file blob ${new_sha1}");
        }

        $mediawiki = connect_maybe($mediawiki, $remotename, $url);

        my $result = $mediawiki->edit( {
            action => 'edit',
            summary => $summary,
            title => $title,
            basetimestamp => $basetimestamps{$oldrevid},
            text => mediawiki_clean($file_content, $page_created),
        }, {
            skip_encoding => 1 # Helps with names with accented characters
        });
        if (!$result) {
            if ($mediawiki->{error}->{code} == 3) {
                # edit conflicts, considered as non-fast-forward
                print {*STDERR} 'Warning: Error ' .
                    $mediawiki->{error}->{code} .
                    ' from mediawiki: ' . $mediawiki->{error}->{details} .
                    ".\n";
                return ($oldrevid, 'non-fast-forward');
            } else {
                # Other errors. Shouldn't happen => just die()
                die 'Fatal: Error ' .
                    $mediawiki->{error}->{code} .
                    ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
            }
        }
        $newrevid = $result->{edit}->{newrevid};
        print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
    } elsif ($export_media) {
        $newrevid = mw_upload_file($complete_file_name, $new_sha1,
                                   $extension, $page_deleted,
                                   $summary);
    } else {
        print {*STDERR} "Ignoring media file ${title}\n";
    }
    $newrevid = ($newrevid or $oldrevid);
    return ($newrevid, 'ok');
}

sub mw_push {
    # multiple push statements can follow each other
    my @refsspecs = (shift, get_more_refs('push'));
    my $pushed;
    for my $refspec (@refsspecs) {
        my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
            or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
        if ($force) {
            print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
        }
        if ($local eq EMPTY) {
            print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
            print {*STDOUT} "error ${remote} cannot delete\n";
            next;
        }
        if ($remote ne 'refs/heads/master') {
            print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
            print {*STDOUT} "error ${remote} only master allowed\n";
            next;
        }
        if (mw_push_revision($local, $remote)) {
            $pushed = 1;
        }
    }

    # Notify Git that the push is done
    print {*STDOUT} "\n";

    if ($pushed && $dumb_push) {
        print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
        print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
        print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
        print {*STDERR} "\n";
        print {*STDERR} "  git pull --rebase\n";
        print {*STDERR} "\n";
    }
    return;
}

sub mw_push_revision {
    my $local = shift;
    my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
    my $last_local_revid = get_last_local_revision();
    print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
    my $last_remote_revid = get_last_remote_revision();
    my $mw_revision = $last_remote_revid;

    # Get sha1 of commit pointed by local HEAD
    my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
    chomp($HEAD_sha1);
    # Get sha1 of commit pointed by remotes/$remotename/master
    my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
    chomp($remoteorigin_sha1);

    if ($last_local_revid > 0 &&
        $last_local_revid < $last_remote_revid) {
        return error_non_fast_forward($remote);
    }

    if ($HEAD_sha1 eq $remoteorigin_sha1) {
        # nothing to push
        return 0;
    }

    # Get every commit in between HEAD and refs/remotes/origin/master,
    # including HEAD and refs/remotes/origin/master
    my @commit_pairs = ();
    if ($last_local_revid > 0) {
        my $parsed_sha1 = $remoteorigin_sha1;
        # Find a path from the last MediaWiki commit to the pushed commit
        print {*STDERR} "Computing path from local to remote ...\n";
        my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
        my %local_ancestry;
        foreach my $line (@local_ancestry) {
            if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
                foreach my $parent (split(/ /, $parents)) {
                    $local_ancestry{$parent} = $child;
                }
            } elsif ($line !~ /^([a-f0-9]+)/) {
                die "Unexpected output from git rev-list: ${line}\n";
            }
        }
        while ($parsed_sha1 ne $HEAD_sha1) {
            my $child = $local_ancestry{$parsed_sha1};
            if (!$child) {
                print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
                return error_non_fast_forward($remote);
            }
            push(@commit_pairs, [$parsed_sha1, $child]);
            $parsed_sha1 = $child;
        }
    } else {
        # No remote mediawiki revision. Export the whole
        # history (linearized with --first-parent)
        print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
        my $history = run_git("rev-list --first-parent --children ${local}");
        my @history = split(/\n/, $history);
        @history = @history[1..$#history];
        foreach my $line (reverse @history) {
            my @commit_info_split = split(/[ \n]/, $line);
            push(@commit_pairs, \@commit_info_split);
        }
    }

    foreach my $commit_info_split (@commit_pairs) {
        my $sha1_child = @{$commit_info_split}[0];
        my $sha1_commit = @{$commit_info_split}[1];
        my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
        # TODO: we could detect renames, and encode them with a #redirect on the wiki.
        # TODO: for now, it's just a delete+add
        my @diff_info_list = split(/\0/, $diff_infos);
        # Keep the subject line of the commit message as the mediawiki comment for the revision
        my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
        chomp($commit_msg);
        # Push every blob
        while (@diff_info_list) {
            my $status;
            # git diff-tree -z gives an output like
            # <metadata>\0<filename1>\0
            # <metadata>\0<filename2>\0
            # and we've split on \0.
            my $info = shift(@diff_info_list);
            my $file = shift(@diff_info_list);
            ($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
            if ($status eq 'non-fast-forward') {
                # we may already have sent part of the
                # commit to MediaWiki, but it's too
                # late to cancel it. Stop the push in
                # the middle, but still give an
                # accurate error message.
                return error_non_fast_forward($remote);
            }
            if ($status ne 'ok') {
                die("Unknown error from mw_push_file()\n");
            }
        }
        if (!$dumb_push) {
            run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
        }
    }

    print {*STDOUT} "ok ${remote}\n";
    return 1;
}

sub get_allowed_file_extensions {
    $mediawiki = connect_maybe($mediawiki, $remotename, $url);

    my $query = {
        action => 'query',
        meta => 'siteinfo',
        siprop => 'fileextensions'
    };
    my $result = $mediawiki->api($query);
    my @file_extensions = map { $_->{ext} } @{$result->{query}->{fileextensions}};
    my %hashFile = map { $_ => 1 } @file_extensions;

    return %hashFile;
}
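# The siteinfo query typically answers with a list of records such as
# [{ext => 'png'}, {ext => 'jpg'}, ...] (illustrative values), which is
# flattened here into a set of allowed extensions.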

# In-memory cache for MediaWiki namespace ids.
my %namespace_id;

# Namespaces whose id is cached in the configuration file
# (to avoid duplicates)
my %cached_mw_namespace_id;

# Return the MediaWiki id for a canonical namespace name.
# Ex.: "File", "Project".
sub get_mw_namespace_id {
    $mediawiki = connect_maybe($mediawiki, $remotename, $url);
    my $name = shift;

    if (!exists $namespace_id{$name}) {
        # Look at the configuration file, in case the record for that
        # namespace is already cached. Namespaces are stored in the form
        # "Name_of_namespace:Id_namespace", ex.: "File:6".
        my @temp = split(/\n/,
                         run_git("config --get-all remote.${remotename}.namespaceCache"));
        chomp(@temp);
        foreach my $ns (@temp) {
            my ($n, $id) = split(/:/, $ns);
            if ($id eq 'notANameSpace') {
                $namespace_id{$n} = {is_namespace => 0};
            } else {
                $namespace_id{$n} = {is_namespace => 1, id => $id};
            }
            $cached_mw_namespace_id{$n} = 1;
        }
    }

    if (!exists $namespace_id{$name}) {
        print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
        # NS not found => get namespace id from MW and store it in
        # the configuration file.
        my $query = {
            action => 'query',
            meta => 'siteinfo',
            siprop => 'namespaces'
        };
        my $result = $mediawiki->api($query);

        while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
            if (defined($ns->{id}) && defined($ns->{canonical})) {
                $namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
                if ($ns->{'*'}) {
                    # alias (e.g. french Fichier: as alias for canonical File:)
                    $namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
                }
            }
        }
    }

    my $ns = $namespace_id{$name};
    my $id;

    if (!defined $ns) {
        my @namespaces = map { s/ /_/g; $_; } sort keys %namespace_id;
        print {*STDERR} "No such namespace ${name} on MediaWiki, known namespaces: @namespaces\n";
        $ns = {is_namespace => 0};
        $namespace_id{$name} = $ns;
    }

    if ($ns->{is_namespace}) {
        $id = $ns->{id};
    }

    # Store "notANameSpace" as a special value for nonexistent namespaces
    my $store_id = ($id || 'notANameSpace');

    # Store explicitly requested namespaces on disk
    if (!exists $cached_mw_namespace_id{$name}) {
        run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
        $cached_mw_namespace_id{$name} = 1;
    }
    return $id;
}

sub get_mw_namespace_id_for_page {
    my $page = shift;
    if ($page =~ /^([^:]*):/) {
        # Pass only the namespace prefix, not the whole page name.
        return get_mw_namespace_id($1);
    } else {
        return;
    }
}