summary refs log tree commit diff
path: root/pkgs/tools/networking/swec/default.nix
blob: e65bc5e208fc06e7a783b618df48215f206c6077 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
{ fetchurl, stdenv, makeWrapper, perlPackages }:

stdenv.mkDerivation rec {
  name = "swec-0.4";

  src = fetchurl {
    url = "http://files.zerodogg.org/swec/${name}.tar.bz2";
    sha256 = "1m3971z4z1wr0paggprfz0n8ng8vsnkc9m6s3bdplgyz7qjk6jwx";
  };

  buildInputs = [ makeWrapper perlPackages.perl perlPackages.LWP perlPackages.URI perlPackages.HTMLParser ];
  checkInputs = [ perlPackages.HTTPServerSimple perlPackages.Parent ];

  configurePhase = ''
    for i in swec tests/{runTests,testServer}
    do
      sed -i "$i" -e's|/usr/bin/perl|${perlPackages.perl}/bin/perl|g'
    done
  '';

  dontBuild = true;

  installPhase = ''
    make install prefix="$out"

    mkdir -p "$out/share/${name}"
    cp -v default.sdf "$out/share/${name}"
    sed -i "$out/bin/swec" -e"s|realpath(\$0)|'$out/share/${name}/swec'|g"

    wrapProgram "$out/bin/swec" \
      --prefix PERL5LIB : ${with perlPackages; makePerlPath [ LWP URI HTMLParser ]}
  '';

  doCheck = true;
  checkPhase = "make test";

  meta = {
    homepage = "https://random.zerodogg.org/swec/";

    description = "Simple Web Error Checker (SWEC)";

    longDescription =
      '' SWEC (Simple Web Error Checker) is a program that automates testing
         of dynamic websites.  It parses each HTML file it finds for links,
         and if those links are within the site specified (ie. local, not
         external), it will check that page as well.  In this respect it
         works a lot like a crawler, in that it'll click on any link it finds
         (more notes about this later).

         In addition to parsing and locating links, it will also parse the
         pages looking for known errors and report those (such as Mason or
         PHP errors), and will report if a page can not be read (by either
         returning a 404, 500 or similar).

         Since you may often want SWEC to be logged in on your site, you have
         to be careful.  When logged in, SWEC will still click on all links
         it finds, including things like 'join group' or 'delete account'
         (though it has some magic trying to avoid the latter).  Therefore it
         is highly recommended that when you run SWEC as a logged-in user on
         a site, use a test server, not the live one.

         Running SWEC on a live site without being logged in as a user is
         perfectly fine, it won't do anything a normal crawler wouldn't do
         (well, not exactly true, SWEC will ignore robots.txt).
      '';

    license = stdenv.lib.licenses.gpl3Plus;

    maintainers = [ ];
    platforms = stdenv.lib.platforms.linux;
  };
}