wiki: updated wikiget for new wiki

git-svn-id: trunk@36349 -
mattias 2012-03-26 18:28:31 +00:00
parent d5b401577b
commit ec018adf2d
3 changed files with 33 additions and 23 deletions

View File

@@ -829,6 +829,7 @@ begin
   if (CurName='pascal')
   or (CurName='delphi')
   or (CurName='code')
+  or (CurName='syntaxhighlight')
   or (CurName='source')
   or (CurName='fpc')
   then

View File

@@ -469,6 +469,7 @@ begin
   with Result do begin
     Add('code',@AllwaysTrue);
     Add('source',@AllwaysTrue);
+    Add('syntaxhighlight',@AllwaysTrue);
     Add('pascal',@AllwaysTrue);
     Add('delphi',@AllwaysTrue);
     if AddLazWikiLangs then begin
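
Both hunks above make the same change: the MediaWiki <syntaxhighlight> tag (from the SyntaxHighlight extension used on the new wiki) is now accepted as a source-code tag alongside code, source, pascal, delphi and fpc. A minimal standalone sketch of that check, using a hypothetical IsCodeTag helper rather than the project's actual routines:

program CheckCodeTag;
{$mode objfpc}{$H+}
uses
  SysUtils;

// Hypothetical helper mirroring the checks above: a lowercased wiki tag
// name marks a source-code block if it is one of the known code tags,
// now including MediaWiki's <syntaxhighlight> tag.
function IsCodeTag(const CurName: string): boolean;
begin
  Result:=(CurName='pascal') or (CurName='delphi') or (CurName='code')
    or (CurName='syntaxhighlight') or (CurName='source') or (CurName='fpc');
end;

begin
  writeln(IsCodeTag(LowerCase('SyntaxHighlight'))); // TRUE
  writeln(IsCodeTag(LowerCase('video')));           // FALSE
end.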

View File

@@ -34,12 +34,12 @@ uses
   {$IF FPC_FULLVERSION<20701}
   myfphttpclient,
   {$ELSE}
-  fphttpclient,
+  fphttpclient, HTTPDefs,
   {$ENDIF}
   WikiParser, WikiFormat;

 const
-  IgnorePrefixes: array[1..11] of string = (
+  IgnorePrefixes: array[1..12] of string = (
     'Special:',
     'Help:',
     'Random:',
@@ -50,6 +50,7 @@ const
     'Category:',
     'User:',
     'User_talk:',
+    'Lazarus_wiki:',
     'index.php'
     );
 type
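
The IgnorePrefixes table grows from 11 to 12 entries: page titles carrying the new Lazarus_wiki: prefix are now skipped as well. A hedged sketch of that filtering, using a reduced, hypothetical subset of the table (the full 12-entry list and wikiget's exact comparison rule are not visible in this diff):

program IgnorePrefixDemo;
{$mode objfpc}{$H+}
uses
  SysUtils;

const
  // Reduced, hypothetical subset of IgnorePrefixes, just for this demo.
  SomePrefixes: array[1..4] of string = (
    'Special:', 'User_talk:', 'Lazarus_wiki:', 'index.php');

// Assumption: a page is skipped when its title starts with one of the
// prefixes; the real comparison in wikiget is not shown in this hunk.
function IsIgnoredTitle(const Title: string): boolean;
var
  i: integer;
begin
  Result:=false;
  for i:=Low(SomePrefixes) to High(SomePrefixes) do
    if copy(Title,1,length(SomePrefixes[i]))=SomePrefixes[i] then
      exit(true);
end;

begin
  writeln(IsIgnoredTitle('Lazarus_wiki:Copyrights')); // TRUE
  writeln(IsIgnoredTitle('Install_Packages'));        // FALSE
end.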
@@ -76,8 +77,8 @@ type
     FAllImages: TStringToStringTree; // image name to filename
   protected
     procedure DoRun; override;
-    procedure GetAll;
-    procedure GetRecent(Days: integer);
+    procedure GetAll(Version: integer = 2; SaveTOC: boolean = false);
+    procedure GetRecent(Days: integer; Version: integer = 2);
     procedure DownloadPage(Page: string);
     procedure DownloadFirstNeededPage;
     procedure CheckNotUsedPages(Show, Delete: boolean);
@@ -185,7 +186,7 @@ begin
     if RecentDays<1 then
       E('invalid --recent value "'+GetOptionValue('recent')+'"');
   end;
-  GetAll;
+  GetAll(2);
   if RecentDays>0 then
     GetRecent(RecentDays);
 end;
@@ -210,7 +211,7 @@ begin
   Terminate;
 end;

-procedure TWikiGet.GetAll;
+procedure TWikiGet.GetAll(Version: integer; SaveTOC: boolean);
 var
   Client: TFPHTTPClient;
   Response: TMemoryStream;
@@ -222,41 +223,45 @@ var
   URLs: TStringList;
   i: Integer;
   Page: String;
-  SaveTOC: Boolean;
 begin
   Client:=nil;
-  SaveTOC:=false;
   URLs:=TStringList.Create;
-  SaveTOC:=false;
   try
     Client:=TFPHTTPClient.Create(nil);
     Response:=TMemoryStream.Create;
     // get list of range pages
-    //URL:=BaseURL+'index.php?title=Special:Allpages&action=submit&namespace=0&from=';
-    URL:=BaseURL+'index.php?title=Special:Allpages';
+    //URL:=BaseURL+'index.php?title=Special:AllPages&action=submit&namespace=0&from=';
+    if Version=1 then
+      URL:=BaseURL+'index.php?title=Special:Allpages'
+    else
+      URL:=BaseURL+'index.php?title=Special:AllPages';
     writeln('getting page "',URL,'" ...');
     Client.Get(URL,Response);
     //Client.ResponseHeaders.SaveToFile('responseheaders.txt');
-    if SaveTOC then begin
-      Response.Position:=0;
-      Filename:='all.html';
-      writeln('saving page "',Filename,'" ...');
-      if not NoWrite then
-        Response.SaveToFile(Filename);
-    end;
+    debugln(['TWikiGet.GetAll ',SaveTOC]);
     if Response.Size>0 then begin
+      if SaveTOC then begin
+        Response.Position:=0;
+        Filename:='all.html';
+        writeln('saving page "',Filename,'" ...');
+        if not NoWrite then
+          Response.SaveToFile(Filename);
+      end;
       Response.Position:=0;
      SetLength(s,Response.Size);
       Response.Read(s[1],length(s));
       repeat
-        p:=Pos('<a href="/Special:Allpages/',s);
+        if Version=1 then
+          p:=Pos('<a href="/Special:Allpages/',s)
+        else
+          p:=Pos('<a href="/index.php?title=Special:AllPages&amp;from=',s);
         if p<1 then break;
         inc(p,length('<a href="'));
         StartPos:=p;
         while (p<=length(s)) and (s[p]<>'"') do inc(p);
-        URL:=copy(s,StartPos,p-StartPos);
+        URL:=XMLValueToStr(copy(s,StartPos,p-StartPos));
         if (URL<>'') and (URLs.IndexOf(URL)<0) then begin;
-          //writeln('TWikiGet.GetAll URL="',URL,'"');
+          writeln('TWikiGet.GetAll URL="',URL,'"');
           URLs.Add(URL);
         end;
         System.Delete(s,1,p);
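
GetAll now takes a Version parameter (defaulting to 2): on the new MediaWiki the index page is Special:AllPages (capital P), and the continuation links carry the start title in a from= query parameter whose & is HTML-escaped as &amp; in the page source, which is why the extracted href is passed through XMLValueToStr. A minimal sketch of both ideas, using a hypothetical BuildAllPagesURL helper, a placeholder base URL, and a stand-in decoder (the real XMLValueToStr from the wiki units is not shown here):

program AllPagesUrlDemo;
{$mode objfpc}{$H+}
uses
  SysUtils;

// Hypothetical helper showing the Version switch GetAll now performs;
// the base URL passed below is a placeholder, not wikiget's real BaseURL.
function BuildAllPagesURL(const BaseURL: string; Version: integer): string;
begin
  if Version=1 then
    Result:=BaseURL+'index.php?title=Special:Allpages'   // old wiki layout
  else
    Result:=BaseURL+'index.php?title=Special:AllPages';  // new MediaWiki
end;

// Assumption: XMLValueToStr decodes XML entities in the href; this
// stand-in only handles the &amp; case needed for the links above.
function DecodeAmp(const s: string): string;
begin
  Result:=StringReplace(s,'&amp;','&',[rfReplaceAll]);
end;

begin
  writeln(BuildAllPagesURL('http://wiki.example.org/',2));
  writeln(DecodeAmp('index.php?title=Special:AllPages&amp;from=Delphi'));
end.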
@@ -311,7 +316,7 @@ begin
   end;
 end;

-procedure TWikiGet.GetRecent(Days: integer);
+procedure TWikiGet.GetRecent(Days: integer; Version: integer);
 const
   linksstart = '<a href="/index.php?title=';
 var
@@ -333,7 +338,10 @@ begin
   try
     Client:=TFPHTTPClient.Create(nil);
     Response:=TMemoryStream.Create;
-    URL:=BaseURL+'index.php?title=Special:Recentchanges&days='+IntToStr(Days)+'&limit=500';
+    if Version=1 then
+      URL:=BaseURL+'index.php?title=Special:Recentchanges&days='+IntToStr(Days)+'&limit=500'
+    else
+      URL:=BaseURL+'index.php?title=Special:RecentChanges&days='+IntToStr(Days)+'&limit=500';
     writeln('getting page "',URL,'" ...');
     Client.Get(URL,Response);
     //Client.ResponseHeaders.SaveToFile('responseheaders.txt');
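
GetRecent gets the same Version switch, pointing at Special:RecentChanges on the new wiki; the returned HTML is then scanned for links starting with the linksstart prefix. A standalone sketch of that scraping pattern, adapted from the Pos/copy loop visible in GetAll above (an illustration only: the real GetRecent body is not part of this diff, and stopping at '&' so only the title= value is kept is an added simplification):

program ScrapeTitlesDemo;
{$mode objfpc}{$H+}
uses
  Classes;

const
  linksstart = '<a href="/index.php?title=';

// Collect the title= value of every matching link, a simplified variant
// of the Pos/copy-until-quote loop shown in the GetAll hunk above.
procedure CollectTitles(s: string; Titles: TStrings);
var
  p, StartPos: integer;
  Title: string;
begin
  repeat
    p:=Pos(linksstart,s);
    if p<1 then break;
    inc(p,length(linksstart));
    StartPos:=p;
    while (p<=length(s)) and (s[p]<>'"') and (s[p]<>'&') do inc(p);
    Title:=copy(s,StartPos,p-StartPos);
    if (Title<>'') and (Titles.IndexOf(Title)<0) then
      Titles.Add(Title);
    System.Delete(s,1,p);
  until false;
end;

var
  Titles: TStringList;
  Html: string;
begin
  Titles:=TStringList.Create;
  try
    Html:='<a href="/index.php?title=Install_Packages&amp;action=history">'
         +'<a href="/index.php?title=Main_Page">';
    CollectTitles(Html,Titles);
    writeln(Titles.Text); // prints Install_Packages and Main_Page
  finally
    Titles.Free;
  end;
end.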