Build: Remove old files replaced by gradle
This change removes files that are no longer needed with the gradle build. The license checker was already rewritten in groovy. The plugin descriptor template exists in buildSrc resources. log4j properties was moved to the test framework. site_en.xml seems to be a legacy file, there are no references to it anywhere in the maven build that I could find. The update lucene script was just a helper for running the license check in update mode, but that can be done with gradle using the updateShas command. Finally, there was a leftover build.gradle from when I attempted to make dev-tools a project of its own.
This commit is contained in:
parent
bc0a840a9c
commit
b9976ada99
|
@ -1,12 +0,0 @@
|
||||||
apply plugin: 'groovy'
|
|
||||||
|
|
||||||
repositories {
|
|
||||||
mavenCentral()
|
|
||||||
}
|
|
||||||
|
|
||||||
dependencies {
|
|
||||||
compile gradleApi()
|
|
||||||
compile localGroovy()
|
|
||||||
//compile group: 'com.carrotsearch.randomizedtesting', name: 'junit4-ant', version: '2.1.16'
|
|
||||||
}
|
|
||||||
|
|
|
@ -1,263 +0,0 @@
|
||||||
#!/usr/bin/env perl
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
use warnings;
|
|
||||||
use 5.010_000;
|
|
||||||
|
|
||||||
use FindBin qw($RealBin);
|
|
||||||
use lib "$RealBin/lib";
|
|
||||||
use File::Spec();
|
|
||||||
use File::Temp 0.2304 ();
|
|
||||||
use File::Find();
|
|
||||||
use File::Basename qw(basename);
|
|
||||||
use Archive::Extract();
|
|
||||||
use Digest::SHA();
|
|
||||||
$Archive::Extract::PREFER_BIN = 1;
|
|
||||||
|
|
||||||
my $mode = shift(@ARGV) || "";
|
|
||||||
die usage() unless $mode =~ /^--(check|update)$/;
|
|
||||||
|
|
||||||
my $License_Dir = shift(@ARGV) || die usage();
|
|
||||||
my $Source = shift(@ARGV) || die usage();
|
|
||||||
my $Ignore = shift(@ARGV) || '';
|
|
||||||
my $ignore
|
|
||||||
= $Ignore
|
|
||||||
? qr/${Ignore}[^\/]*$/
|
|
||||||
: qr/elasticsearch[^\/]*$/;
|
|
||||||
|
|
||||||
$License_Dir = File::Spec->rel2abs($License_Dir) . '/';
|
|
||||||
$Source = File::Spec->rel2abs($Source);
|
|
||||||
|
|
||||||
say "LICENSE DIR: $License_Dir";
|
|
||||||
say "SOURCE: $Source";
|
|
||||||
say "IGNORE: $Ignore";
|
|
||||||
|
|
||||||
die "License dir is not a directory: $License_Dir\n" . usage()
|
|
||||||
unless -d $License_Dir;
|
|
||||||
|
|
||||||
my %shas
|
|
||||||
= -f $Source ? jars_from_zip( $Source, $ignore )
|
|
||||||
: -d $Source ? jars_from_dir( $Source, $ignore )
|
|
||||||
: die "Source is neither a directory nor a zip file: $Source" . usage();
|
|
||||||
|
|
||||||
$mode eq '--check'
|
|
||||||
? exit check_shas_and_licenses(%shas)
|
|
||||||
: exit write_shas(%shas);
|
|
||||||
|
|
||||||
#===================================
|
|
||||||
sub check_shas_and_licenses {
|
|
||||||
#===================================
|
|
||||||
my %new = @_;
|
|
||||||
|
|
||||||
my %old = get_sha_files();
|
|
||||||
my %licenses = get_files_with('LICENSE');
|
|
||||||
my %notices = get_files_with('NOTICE');
|
|
||||||
|
|
||||||
my $error = 0;
|
|
||||||
my $sha_error = 0;
|
|
||||||
|
|
||||||
for my $jar ( sort keys %new ) {
|
|
||||||
my $old_sha = delete $old{$jar};
|
|
||||||
unless ($old_sha) {
|
|
||||||
say STDERR "$jar: SHA is missing";
|
|
||||||
$error++;
|
|
||||||
$sha_error++;
|
|
||||||
next;
|
|
||||||
}
|
|
||||||
|
|
||||||
unless ( $old_sha eq $new{$jar} ) {
|
|
||||||
say STDERR
|
|
||||||
"$jar: SHA has changed, expected $old_sha but found $new{$jar}";
|
|
||||||
$error++;
|
|
||||||
$sha_error++;
|
|
||||||
next;
|
|
||||||
}
|
|
||||||
|
|
||||||
my $license_found;
|
|
||||||
my $notice_found;
|
|
||||||
my $prefix = $jar;
|
|
||||||
$prefix =~ s/\.sha1//;
|
|
||||||
|
|
||||||
while ( $prefix =~ s/-[^\-]+$// ) {
|
|
||||||
if ( exists $licenses{$prefix} ) {
|
|
||||||
$license_found = 1;
|
|
||||||
|
|
||||||
# mark all licenses with the same prefix as used
|
|
||||||
for ( keys %licenses ) {
|
|
||||||
$licenses{$_}++ if index( $prefix, $_ ) == 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
if ( exists $notices{$prefix} ) {
|
|
||||||
$notices{$prefix}++;
|
|
||||||
$notice_found = 1;
|
|
||||||
}
|
|
||||||
last;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
unless ($license_found) {
|
|
||||||
say STDERR "$jar: LICENSE is missing";
|
|
||||||
$error++;
|
|
||||||
$sha_error++;
|
|
||||||
}
|
|
||||||
unless ($notice_found) {
|
|
||||||
say STDERR "$jar: NOTICE is missing";
|
|
||||||
$error++;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if ( keys %old ) {
|
|
||||||
say STDERR "Extra SHA files present for: " . join ", ", sort keys %old;
|
|
||||||
$error++;
|
|
||||||
}
|
|
||||||
|
|
||||||
my @unused_licenses = grep { !$licenses{$_} } keys %licenses;
|
|
||||||
if (@unused_licenses) {
|
|
||||||
$error++;
|
|
||||||
say STDERR "Extra LICENCE file present: " . join ", ",
|
|
||||||
sort @unused_licenses;
|
|
||||||
}
|
|
||||||
|
|
||||||
my @unused_notices = grep { !$notices{$_} } keys %notices;
|
|
||||||
if (@unused_notices) {
|
|
||||||
$error++;
|
|
||||||
say STDERR "Extra NOTICE file present: " . join ", ",
|
|
||||||
sort @unused_notices;
|
|
||||||
}
|
|
||||||
|
|
||||||
if ($sha_error) {
|
|
||||||
say STDERR <<"SHAS"
|
|
||||||
|
|
||||||
You can update the SHA files by running:
|
|
||||||
|
|
||||||
$0 --update $License_Dir $Source $Ignore
|
|
||||||
|
|
||||||
SHAS
|
|
||||||
}
|
|
||||||
say("All SHAs and licenses OK") unless $error;
|
|
||||||
return $error;
|
|
||||||
}
|
|
||||||
|
|
||||||
#===================================
|
|
||||||
sub write_shas {
|
|
||||||
#===================================
|
|
||||||
my %new = @_;
|
|
||||||
my %old = get_sha_files();
|
|
||||||
|
|
||||||
for my $jar ( sort keys %new ) {
|
|
||||||
if ( $old{$jar} ) {
|
|
||||||
next if $old{$jar} eq $new{$jar};
|
|
||||||
say "Updating $jar";
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
say "Adding $jar";
|
|
||||||
}
|
|
||||||
open my $fh, '>', $License_Dir . $jar or die $!;
|
|
||||||
say $fh $new{$jar} or die $!;
|
|
||||||
close $fh or die $!;
|
|
||||||
}
|
|
||||||
continue {
|
|
||||||
delete $old{$jar};
|
|
||||||
}
|
|
||||||
|
|
||||||
for my $jar ( sort keys %old ) {
|
|
||||||
say "Deleting $jar";
|
|
||||||
unlink $License_Dir . $jar or die $!;
|
|
||||||
}
|
|
||||||
say "SHAs updated";
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
#===================================
|
|
||||||
sub get_files_with {
|
|
||||||
#===================================
|
|
||||||
my $pattern = shift;
|
|
||||||
my %files;
|
|
||||||
for my $path ( grep {-f} glob("$License_Dir/*$pattern*") ) {
|
|
||||||
my ($file) = ( $path =~ m{([^/]+)-${pattern}.*$} );
|
|
||||||
$files{$file} = 0;
|
|
||||||
}
|
|
||||||
return %files;
|
|
||||||
}
|
|
||||||
|
|
||||||
#===================================
|
|
||||||
sub get_sha_files {
|
|
||||||
#===================================
|
|
||||||
my %shas;
|
|
||||||
|
|
||||||
die "Missing directory: $License_Dir\n"
|
|
||||||
unless -d $License_Dir;
|
|
||||||
|
|
||||||
for my $file ( grep {-f} glob("$License_Dir/*.sha1") ) {
|
|
||||||
my ($jar) = ( $file =~ m{([^/]+)$} );
|
|
||||||
open my $fh, '<', $file or die $!;
|
|
||||||
my $sha = <$fh>;
|
|
||||||
$sha ||= '';
|
|
||||||
chomp $sha;
|
|
||||||
$shas{$jar} = $sha;
|
|
||||||
}
|
|
||||||
return %shas;
|
|
||||||
}
|
|
||||||
|
|
||||||
#===================================
|
|
||||||
sub jars_from_zip {
|
|
||||||
#===================================
|
|
||||||
my ( $source, $ignore ) = @_;
|
|
||||||
my $temp_dir = File::Temp->newdir;
|
|
||||||
my $dir_name = $temp_dir->dirname;
|
|
||||||
my $archive = Archive::Extract->new( archive => $source, type => 'zip' );
|
|
||||||
$archive->extract( to => $dir_name ) || die $archive->error;
|
|
||||||
my @jars = map { File::Spec->rel2abs( $_, $dir_name ) }
|
|
||||||
grep { /\.jar$/ && !/$ignore/ } @{ $archive->files };
|
|
||||||
return calculate_shas(@jars);
|
|
||||||
}
|
|
||||||
|
|
||||||
#===================================
|
|
||||||
sub jars_from_dir {
|
|
||||||
#===================================
|
|
||||||
my ( $source, $ignore ) = @_;
|
|
||||||
my @jars;
|
|
||||||
File::Find::find(
|
|
||||||
{ wanted => sub {
|
|
||||||
push @jars, File::Spec->rel2abs( $_, $source )
|
|
||||||
if /\.jar$/ && !/$ignore/;
|
|
||||||
},
|
|
||||||
no_chdir => 1
|
|
||||||
},
|
|
||||||
$source
|
|
||||||
);
|
|
||||||
return calculate_shas(@jars);
|
|
||||||
}
|
|
||||||
|
|
||||||
#===================================
|
|
||||||
sub calculate_shas {
|
|
||||||
#===================================
|
|
||||||
my %shas;
|
|
||||||
while ( my $file = shift() ) {
|
|
||||||
my $digest = eval { Digest::SHA->new(1)->addfile($file) }
|
|
||||||
or die "Error calculating SHA1 for <$file>: $!\n";
|
|
||||||
$shas{ basename($file) . ".sha1" } = $digest->hexdigest;
|
|
||||||
}
|
|
||||||
return %shas;
|
|
||||||
}
|
|
||||||
|
|
||||||
#===================================
|
|
||||||
sub usage {
|
|
||||||
#===================================
|
|
||||||
return <<"USAGE";
|
|
||||||
|
|
||||||
USAGE:
|
|
||||||
|
|
||||||
# check the sha1 and LICENSE files for each jar in the zip or directory
|
|
||||||
$0 --check path/to/licenses/ path/to/package.zip [prefix_to_ignore]
|
|
||||||
$0 --check path/to/licenses/ path/to/dir/ [prefix_to_ignore]
|
|
||||||
|
|
||||||
# updates the sha1s for each jar in the zip or directory
|
|
||||||
$0 --update path/to/licenses/ path/to/package.zip [prefix_to_ignore]
|
|
||||||
$0 --update path/to/licenses/ path/to/dir/ [prefix_to_ignore]
|
|
||||||
|
|
||||||
The optional prefix_to_ignore parameter defaults to "elasticsearch".
|
|
||||||
|
|
||||||
USAGE
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
@ -1,131 +0,0 @@
|
||||||
package Archive::Zip::BufferedFileHandle;
|
|
||||||
|
|
||||||
# File handle that uses a string internally and can seek
|
|
||||||
# This is given as a demo for getting a zip file written
|
|
||||||
# to a string.
|
|
||||||
# I probably should just use IO::Scalar instead.
|
|
||||||
# Ned Konz, March 2000
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
use IO::File;
|
|
||||||
use Carp;
|
|
||||||
|
|
||||||
use vars qw{$VERSION};
|
|
||||||
|
|
||||||
BEGIN {
|
|
||||||
$VERSION = '1.48';
|
|
||||||
$VERSION = eval $VERSION;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub new {
|
|
||||||
my $class = shift || __PACKAGE__;
|
|
||||||
$class = ref($class) || $class;
|
|
||||||
my $self = bless(
|
|
||||||
{
|
|
||||||
content => '',
|
|
||||||
position => 0,
|
|
||||||
size => 0
|
|
||||||
},
|
|
||||||
$class
|
|
||||||
);
|
|
||||||
return $self;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Utility method to read entire file
|
|
||||||
sub readFromFile {
|
|
||||||
my $self = shift;
|
|
||||||
my $fileName = shift;
|
|
||||||
my $fh = IO::File->new($fileName, "r");
|
|
||||||
CORE::binmode($fh);
|
|
||||||
if (!$fh) {
|
|
||||||
Carp::carp("Can't open $fileName: $!\n");
|
|
||||||
return undef;
|
|
||||||
}
|
|
||||||
local $/ = undef;
|
|
||||||
$self->{content} = <$fh>;
|
|
||||||
$self->{size} = length($self->{content});
|
|
||||||
return $self;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub contents {
|
|
||||||
my $self = shift;
|
|
||||||
if (@_) {
|
|
||||||
$self->{content} = shift;
|
|
||||||
$self->{size} = length($self->{content});
|
|
||||||
}
|
|
||||||
return $self->{content};
|
|
||||||
}
|
|
||||||
|
|
||||||
sub binmode { 1 }
|
|
||||||
|
|
||||||
sub close { 1 }
|
|
||||||
|
|
||||||
sub opened { 1 }
|
|
||||||
|
|
||||||
sub eof {
|
|
||||||
my $self = shift;
|
|
||||||
return $self->{position} >= $self->{size};
|
|
||||||
}
|
|
||||||
|
|
||||||
sub seek {
|
|
||||||
my $self = shift;
|
|
||||||
my $pos = shift;
|
|
||||||
my $whence = shift;
|
|
||||||
|
|
||||||
# SEEK_SET
|
|
||||||
if ($whence == 0) { $self->{position} = $pos; }
|
|
||||||
|
|
||||||
# SEEK_CUR
|
|
||||||
elsif ($whence == 1) { $self->{position} += $pos; }
|
|
||||||
|
|
||||||
# SEEK_END
|
|
||||||
elsif ($whence == 2) { $self->{position} = $self->{size} + $pos; }
|
|
||||||
else { return 0; }
|
|
||||||
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub tell { return shift->{position}; }
|
|
||||||
|
|
||||||
# Copy my data to given buffer
|
|
||||||
sub read {
|
|
||||||
my $self = shift;
|
|
||||||
my $buf = \($_[0]);
|
|
||||||
shift;
|
|
||||||
my $len = shift;
|
|
||||||
my $offset = shift || 0;
|
|
||||||
|
|
||||||
$$buf = '' if not defined($$buf);
|
|
||||||
my $bytesRead =
|
|
||||||
($self->{position} + $len > $self->{size})
|
|
||||||
? ($self->{size} - $self->{position})
|
|
||||||
: $len;
|
|
||||||
substr($$buf, $offset, $bytesRead) =
|
|
||||||
substr($self->{content}, $self->{position}, $bytesRead);
|
|
||||||
$self->{position} += $bytesRead;
|
|
||||||
return $bytesRead;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Copy given buffer to me
|
|
||||||
sub write {
|
|
||||||
my $self = shift;
|
|
||||||
my $buf = \($_[0]);
|
|
||||||
shift;
|
|
||||||
my $len = shift;
|
|
||||||
my $offset = shift || 0;
|
|
||||||
|
|
||||||
$$buf = '' if not defined($$buf);
|
|
||||||
my $bufLen = length($$buf);
|
|
||||||
my $bytesWritten =
|
|
||||||
($offset + $len > $bufLen)
|
|
||||||
? $bufLen - $offset
|
|
||||||
: $len;
|
|
||||||
substr($self->{content}, $self->{position}, $bytesWritten) =
|
|
||||||
substr($$buf, $offset, $bytesWritten);
|
|
||||||
$self->{size} = length($self->{content});
|
|
||||||
return $bytesWritten;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub clearerr() { 1 }
|
|
||||||
|
|
||||||
1;
|
|
|
@ -1,80 +0,0 @@
|
||||||
package Archive::Zip::DirectoryMember;
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
use File::Path;
|
|
||||||
|
|
||||||
use vars qw( $VERSION @ISA );
|
|
||||||
|
|
||||||
BEGIN {
|
|
||||||
$VERSION = '1.48';
|
|
||||||
@ISA = qw( Archive::Zip::Member );
|
|
||||||
}
|
|
||||||
|
|
||||||
use Archive::Zip qw(
|
|
||||||
:ERROR_CODES
|
|
||||||
:UTILITY_METHODS
|
|
||||||
);
|
|
||||||
|
|
||||||
sub _newNamed {
|
|
||||||
my $class = shift;
|
|
||||||
my $fileName = shift; # FS name
|
|
||||||
my $newName = shift; # Zip name
|
|
||||||
$newName = _asZipDirName($fileName) unless $newName;
|
|
||||||
my $self = $class->new(@_);
|
|
||||||
$self->{'externalFileName'} = $fileName;
|
|
||||||
$self->fileName($newName);
|
|
||||||
|
|
||||||
if (-e $fileName) {
|
|
||||||
|
|
||||||
# -e does NOT do a full stat, so we need to do one now
|
|
||||||
if (-d _ ) {
|
|
||||||
my @stat = stat(_);
|
|
||||||
$self->unixFileAttributes($stat[2]);
|
|
||||||
my $mod_t = $stat[9];
|
|
||||||
if ($^O eq 'MSWin32' and !$mod_t) {
|
|
||||||
$mod_t = time();
|
|
||||||
}
|
|
||||||
$self->setLastModFileDateTimeFromUnix($mod_t);
|
|
||||||
|
|
||||||
} else { # hmm.. trying to add a non-directory?
|
|
||||||
_error($fileName, ' exists but is not a directory');
|
|
||||||
return undef;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
$self->unixFileAttributes($self->DEFAULT_DIRECTORY_PERMISSIONS);
|
|
||||||
$self->setLastModFileDateTimeFromUnix(time());
|
|
||||||
}
|
|
||||||
return $self;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub externalFileName {
|
|
||||||
shift->{'externalFileName'};
|
|
||||||
}
|
|
||||||
|
|
||||||
sub isDirectory {
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub extractToFileNamed {
|
|
||||||
my $self = shift;
|
|
||||||
my $name = shift; # local FS name
|
|
||||||
my $attribs = $self->unixFileAttributes() & 07777;
|
|
||||||
mkpath($name, 0, $attribs); # croaks on error
|
|
||||||
utime($self->lastModTime(), $self->lastModTime(), $name);
|
|
||||||
return AZ_OK;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub fileName {
|
|
||||||
my $self = shift;
|
|
||||||
my $newName = shift;
|
|
||||||
$newName =~ s{/?$}{/} if defined($newName);
|
|
||||||
return $self->SUPER::fileName($newName);
|
|
||||||
}
|
|
||||||
|
|
||||||
# So people don't get too confused. This way it looks like the problem
|
|
||||||
# is in their code...
|
|
||||||
sub contents {
|
|
||||||
return wantarray ? (undef, AZ_OK) : undef;
|
|
||||||
}
|
|
||||||
|
|
||||||
1;
|
|
|
@ -1,344 +0,0 @@
|
||||||
=head1 NAME
|
|
||||||
|
|
||||||
Archive::Zip::FAQ - Answers to a few frequently asked questions about Archive::Zip
|
|
||||||
|
|
||||||
=head1 DESCRIPTION
|
|
||||||
|
|
||||||
It seems that I keep answering the same questions over and over again. I
|
|
||||||
assume that this is because my documentation is deficient, rather than that
|
|
||||||
people don't read the documentation.
|
|
||||||
|
|
||||||
So this FAQ is an attempt to cut down on the number of personal answers I have
|
|
||||||
to give. At least I can now say "You I<did> read the FAQ, right?".
|
|
||||||
|
|
||||||
The questions are not in any particular order. The answers assume the current
|
|
||||||
version of Archive::Zip; some of the answers depend on newly added/fixed
|
|
||||||
functionality.
|
|
||||||
|
|
||||||
=head1 Install problems on RedHat 8 or 9 with Perl 5.8.0
|
|
||||||
|
|
||||||
B<Q:> Archive::Zip won't install on my RedHat 9 system! It's broke!
|
|
||||||
|
|
||||||
B<A:> This has become something of a FAQ.
|
|
||||||
Basically, RedHat broke some versions of Perl by setting LANG to UTF8.
|
|
||||||
They apparently have a fixed version out as an update.
|
|
||||||
|
|
||||||
You might try running CPAN or creating your Makefile after exporting the LANG
|
|
||||||
environment variable as
|
|
||||||
|
|
||||||
C<LANG=C>
|
|
||||||
|
|
||||||
L<https://bugzilla.redhat.com/bugzilla/show_bug.cgi?id=87682>
|
|
||||||
|
|
||||||
=head1 Why is my zip file so big?
|
|
||||||
|
|
||||||
B<Q:> My zip file is actually bigger than what I stored in it! Why?
|
|
||||||
|
|
||||||
B<A:> Some things to make sure of:
|
|
||||||
|
|
||||||
=over 4
|
|
||||||
|
|
||||||
=item Make sure that you are requesting COMPRESSION_DEFLATED if you are storing strings.
|
|
||||||
|
|
||||||
$member->desiredCompressionMethod( COMPRESSION_DEFLATED );
|
|
||||||
|
|
||||||
=item Don't make lots of little files if you can help it.
|
|
||||||
|
|
||||||
Since zip computes the compression tables for each member, small
|
|
||||||
members without much entropy won't compress well. Instead, if you've
|
|
||||||
got lots of repeated strings in your data, try to combine them into
|
|
||||||
one big member.
|
|
||||||
|
|
||||||
=item Make sure that you are requesting COMPRESSION_STORED if you are storing things that are already compressed.
|
|
||||||
|
|
||||||
If you're storing a .zip, .jpg, .mp3, or other compressed file in a zip,
|
|
||||||
then don't compress them again. They'll get bigger.
|
|
||||||
|
|
||||||
=back
|
|
||||||
|
|
||||||
=head1 Sample code?
|
|
||||||
|
|
||||||
B<Q:> Can you send me code to do (whatever)?
|
|
||||||
|
|
||||||
B<A:> Have you looked in the C<examples/> directory yet? It contains:
|
|
||||||
|
|
||||||
=over 4
|
|
||||||
|
|
||||||
=item examples/calcSizes.pl -- How to find out how big a Zip file will be before writing it
|
|
||||||
|
|
||||||
=item examples/copy.pl -- Copies one Zip file to another
|
|
||||||
|
|
||||||
=item examples/extract.pl -- extract file(s) from a Zip
|
|
||||||
|
|
||||||
=item examples/mailZip.pl -- make and mail a zip file
|
|
||||||
|
|
||||||
=item examples/mfh.pl -- demo for use of MockFileHandle
|
|
||||||
|
|
||||||
=item examples/readScalar.pl -- shows how to use IO::Scalar as the source of a Zip read
|
|
||||||
|
|
||||||
=item examples/selfex.pl -- a brief example of a self-extracting Zip
|
|
||||||
|
|
||||||
=item examples/unzipAll.pl -- uses Archive::Zip::Tree to unzip an entire Zip
|
|
||||||
|
|
||||||
=item examples/updateZip.pl -- shows how to read/modify/write a Zip
|
|
||||||
|
|
||||||
=item examples/updateTree.pl -- shows how to update a Zip in place
|
|
||||||
|
|
||||||
=item examples/writeScalar.pl -- shows how to use IO::Scalar as the destination of a Zip write
|
|
||||||
|
|
||||||
=item examples/writeScalar2.pl -- shows how to use IO::String as the destination of a Zip write
|
|
||||||
|
|
||||||
=item examples/zip.pl -- Constructs a Zip file
|
|
||||||
|
|
||||||
=item examples/zipcheck.pl -- One way to check a Zip file for validity
|
|
||||||
|
|
||||||
=item examples/zipinfo.pl -- Prints out information about a Zip archive file
|
|
||||||
|
|
||||||
=item examples/zipGrep.pl -- Searches for text in Zip files
|
|
||||||
|
|
||||||
=item examples/ziptest.pl -- Lists a Zip file and checks member CRCs
|
|
||||||
|
|
||||||
=item examples/ziprecent.pl -- Puts recent files into a zipfile
|
|
||||||
|
|
||||||
=item examples/ziptest.pl -- Another way to check a Zip file for validity
|
|
||||||
|
|
||||||
=back
|
|
||||||
|
|
||||||
=head1 Can't Read/modify/write same Zip file
|
|
||||||
|
|
||||||
B<Q:> Why can't I open a Zip file, add a member, and write it back? I get an
|
|
||||||
error message when I try.
|
|
||||||
|
|
||||||
B<A:> Because Archive::Zip doesn't (and can't, generally) read file contents into memory,
|
|
||||||
the original Zip file is required to stay around until the writing of the new
|
|
||||||
file is completed.
|
|
||||||
|
|
||||||
The best way to do this is to write the Zip to a temporary file and then
|
|
||||||
rename the temporary file to have the old name (possibly after deleting the
|
|
||||||
old one).
|
|
||||||
|
|
||||||
Archive::Zip v1.02 added the archive methods C<overwrite()> and
|
|
||||||
C<overwriteAs()> to do this simply and carefully.
|
|
||||||
|
|
||||||
See C<examples/updateZip.pl> for an example of this technique.
|
|
||||||
|
|
||||||
=head1 File creation time not set
|
|
||||||
|
|
||||||
B<Q:> Upon extracting files, I see that their modification (and access) times are
|
|
||||||
set to the time in the Zip archive. However, their creation time is not set to
|
|
||||||
the same time. Why?
|
|
||||||
|
|
||||||
B<A:> Mostly because Perl doesn't give cross-platform access to I<creation time>.
|
|
||||||
Indeed, many systems (like Unix) don't support such a concept.
|
|
||||||
However, if yours does, you can easily set it. Get the modification time from
|
|
||||||
the member using C<lastModTime()>.
|
|
||||||
|
|
||||||
=head1 Can't use Archive::Zip on gzip files
|
|
||||||
|
|
||||||
B<Q:> Can I use Archive::Zip to extract Unix gzip files?
|
|
||||||
|
|
||||||
B<A:> No.
|
|
||||||
|
|
||||||
There is a distinction between Unix gzip files, and Zip archives that
|
|
||||||
also can use the gzip compression.
|
|
||||||
|
|
||||||
Depending on the format of the gzip file, you can use L<Compress::Raw::Zlib>, or
|
|
||||||
L<Archive::Tar> to decompress it (and de-archive it in the case of Tar files).
|
|
||||||
|
|
||||||
You can unzip PKZIP/WinZip/etc/ archives using Archive::Zip (that's what
|
|
||||||
it's for) as long as any compressed members are compressed using
|
|
||||||
Deflate compression.
|
|
||||||
|
|
||||||
=head1 Add a directory/tree to a Zip
|
|
||||||
|
|
||||||
B<Q:> How can I add a directory (or tree) full of files to a Zip?
|
|
||||||
|
|
||||||
B<A:> You can use the Archive::Zip::addTree*() methods:
|
|
||||||
|
|
||||||
use Archive::Zip;
|
|
||||||
my $zip = Archive::Zip->new();
|
|
||||||
# add all readable files and directories below . as xyz/*
|
|
||||||
$zip->addTree( '.', 'xyz' );
|
|
||||||
# add all readable plain files below /abc as def/*
|
|
||||||
$zip->addTree( '/abc', 'def', sub { -f && -r } );
|
|
||||||
# add all .c files below /tmp as stuff/*
|
|
||||||
$zip->addTreeMatching( '/tmp', 'stuff', '\.c$' );
|
|
||||||
# add all .o files below /tmp as stuff/* if they aren't writable
|
|
||||||
$zip->addTreeMatching( '/tmp', 'stuff', '\.o$', sub { ! -w } );
|
|
||||||
# add all .so files below /tmp that are smaller than 200 bytes as stuff/*
|
|
||||||
$zip->addTreeMatching( '/tmp', 'stuff', '\.o$', sub { -s < 200 } );
|
|
||||||
# and write them into a file
|
|
||||||
$zip->writeToFileNamed('xxx.zip');
|
|
||||||
|
|
||||||
=head1 Extract a directory/tree
|
|
||||||
|
|
||||||
B<Q:> How can I extract some (or all) files from a Zip into a different
|
|
||||||
directory?
|
|
||||||
|
|
||||||
B<A:> You can use the Archive::Zip::extractTree() method:
|
|
||||||
??? ||
|
|
||||||
|
|
||||||
# now extract the same files into /tmpx
|
|
||||||
$zip->extractTree( 'stuff', '/tmpx' );
|
|
||||||
|
|
||||||
=head1 Update a directory/tree
|
|
||||||
|
|
||||||
B<Q:> How can I update a Zip from a directory tree, adding or replacing only
|
|
||||||
the newer files?
|
|
||||||
|
|
||||||
B<A:> You can use the Archive::Zip::updateTree() method that was added in version 1.09.
|
|
||||||
|
|
||||||
=head1 Zip times might be off by 1 second
|
|
||||||
|
|
||||||
B<Q:> It bothers me greatly that my file times are wrong by one second about half
|
|
||||||
the time. Why don't you do something about it?
|
|
||||||
|
|
||||||
B<A:> Get over it. This is a result of the Zip format storing times in DOS
|
|
||||||
format, which has a resolution of only two seconds.
|
|
||||||
|
|
||||||
=head1 Zip times don't include time zone information
|
|
||||||
|
|
||||||
B<Q:> My file times don't respect time zones. What gives?
|
|
||||||
|
|
||||||
B<A:> If this is important to you, please submit patches to read the various
|
|
||||||
Extra Fields that encode times with time zones. I'm just using the DOS
|
|
||||||
Date/Time, which doesn't have a time zone.
|
|
||||||
|
|
||||||
=head1 How do I make a self-extracting Zip
|
|
||||||
|
|
||||||
B<Q:> I want to make a self-extracting Zip file. Can I do this?
|
|
||||||
|
|
||||||
B<A:> Yes. You can write a self-extracting archive stub (that is, a version of
|
|
||||||
unzip) to the output filehandle that you pass to writeToFileHandle(). See
|
|
||||||
examples/selfex.pl for how to write a self-extracting archive.
|
|
||||||
|
|
||||||
However, you should understand that this will only work on one kind of
|
|
||||||
platform (the one for which the stub was compiled).
|
|
||||||
|
|
||||||
=head1 How can I deal with Zips with prepended garbage (i.e. from Sircam)
|
|
||||||
|
|
||||||
B<Q:> How can I tell if a Zip has been damaged by adding garbage to the
|
|
||||||
beginning or inside the file?
|
|
||||||
|
|
||||||
B<A:> I added code for this for the Amavis virus scanner. You can query archives
|
|
||||||
for their 'eocdOffset' property, which should be 0:
|
|
||||||
|
|
||||||
if ($zip->eocdOffset > 0)
|
|
||||||
{ warn($zip->eocdOffset . " bytes of garbage at beginning or within Zip") }
|
|
||||||
|
|
||||||
When members are extracted, this offset will be used to adjust the start of
|
|
||||||
the member if necessary.
|
|
||||||
|
|
||||||
=head1 Can't extract Shrunk files
|
|
||||||
|
|
||||||
B<Q:> I'm trying to extract a file out of a Zip produced by PKZIP, and keep
|
|
||||||
getting this error message:
|
|
||||||
|
|
||||||
error: Unsupported compression combination: read 6, write 0
|
|
||||||
|
|
||||||
B<A:> You can't uncompress this archive member. Archive::Zip only supports uncompressed
|
|
||||||
members, and compressed members that are compressed using the compression
|
|
||||||
supported by Compress::Raw::Zlib. That means only Deflated and Stored members.
|
|
||||||
|
|
||||||
Your file is compressed using the Shrink format, which is not supported by
|
|
||||||
Compress::Raw::Zlib.
|
|
||||||
|
|
||||||
You could, perhaps, use a command-line UnZip program (like the Info-Zip
|
|
||||||
one) to extract this.
|
|
||||||
|
|
||||||
=head1 Can't do decryption
|
|
||||||
|
|
||||||
B<Q:> How do I decrypt encrypted Zip members?
|
|
||||||
|
|
||||||
B<A:> With some other program or library. Archive::Zip doesn't support decryption,
|
|
||||||
and probably never will (unless I<you> write it).
|
|
||||||
|
|
||||||
=head1 How to test file integrity?
|
|
||||||
|
|
||||||
B<Q:> How can Archive::Zip can test the validity of a Zip file?
|
|
||||||
|
|
||||||
B<A:> If you try to decompress the file, the gzip streams will report errors
|
|
||||||
if you have garbage. Most of the time.
|
|
||||||
|
|
||||||
If you try to open the file and a central directory structure can't be
|
|
||||||
found, an error will be reported.
|
|
||||||
|
|
||||||
When a file is being read, if we can't find a proper PK.. signature in
|
|
||||||
the right places we report a format error.
|
|
||||||
|
|
||||||
If there is added garbage at the beginning of a Zip file (as inserted
|
|
||||||
by some viruses), you can find out about it, but Archive::Zip will ignore it,
|
|
||||||
and you can still use the archive. When it gets written back out the
|
|
||||||
added stuff will be gone.
|
|
||||||
|
|
||||||
There are two ready-to-use utilities in the examples directory that can
|
|
||||||
be used to test file integrity, or that you can use as examples
|
|
||||||
for your own code:
|
|
||||||
|
|
||||||
=over 4
|
|
||||||
|
|
||||||
=item examples/zipcheck.pl shows how to use an attempted extraction to test a file.
|
|
||||||
|
|
||||||
=item examples/ziptest.pl shows how to test CRCs in a file.
|
|
||||||
|
|
||||||
=back
|
|
||||||
|
|
||||||
=head1 Duplicate files in Zip?
|
|
||||||
|
|
||||||
B<Q:> Archive::Zip let me put the same file in my Zip twice! Why don't you prevent this?
|
|
||||||
|
|
||||||
B<A:> As far as I can tell, this is not disallowed by the Zip spec. If you
|
|
||||||
think it's a bad idea, check for it yourself:
|
|
||||||
|
|
||||||
$zip->addFile($someFile, $someName) unless $zip->memberNamed($someName);
|
|
||||||
|
|
||||||
I can even imagine cases where this might be useful (for instance, multiple
|
|
||||||
versions of files).
|
|
||||||
|
|
||||||
=head1 File ownership/permissions/ACLS/etc
|
|
||||||
|
|
||||||
B<Q:> Why doesn't Archive::Zip deal with file ownership, ACLs, etc.?
|
|
||||||
|
|
||||||
B<A:> There is no standard way to represent these in the Zip file format. If
|
|
||||||
you want to send me code to properly handle the various extra fields that
|
|
||||||
have been used to represent these through the years, I'll look at it.
|
|
||||||
|
|
||||||
=head1 I can't compile but ActiveState only has an old version of Archive::Zip
|
|
||||||
|
|
||||||
B<Q:> I've only installed modules using ActiveState's PPM program and
|
|
||||||
repository. But they have a much older version of Archive::Zip than is in CPAN. Will
|
|
||||||
you send me a newer PPM?
|
|
||||||
|
|
||||||
B<A:> Probably not, unless I get lots of extra time. But there's no reason you
|
|
||||||
can't install the version from CPAN. Archive::Zip is pure Perl, so all you need is
|
|
||||||
NMAKE, which you can get for free from Microsoft (see the FAQ in the
|
|
||||||
ActiveState documentation for details on how to install CPAN modules).
|
|
||||||
|
|
||||||
=head1 My JPEGs (or MP3's) don't compress when I put them into Zips!
|
|
||||||
|
|
||||||
B<Q:> How come my JPEGs and MP3's don't compress much when I put them into Zips?
|
|
||||||
|
|
||||||
B<A:> Because they're already compressed.
|
|
||||||
|
|
||||||
=head1 Under Windows, things lock up/get damaged
|
|
||||||
|
|
||||||
B<Q:> I'm using Windows. When I try to use Archive::Zip, my machine locks up/makes
|
|
||||||
funny sounds/displays a BSOD/corrupts data. How can I fix this?
|
|
||||||
|
|
||||||
B<A:> First, try the newest version of Compress::Raw::Zlib. I know of
|
|
||||||
Windows-related problems prior to v1.14 of that library.
|
|
||||||
|
|
||||||
=head1 Zip contents in a scalar
|
|
||||||
|
|
||||||
B<Q:> I want to read a Zip file from (or write one to) a scalar variable instead
|
|
||||||
of a file. How can I do this?
|
|
||||||
|
|
||||||
B<A:> Use C<IO::String> and the C<readFromFileHandle()> and
|
|
||||||
C<writeToFileHandle()> methods.
|
|
||||||
See C<examples/readScalar.pl> and C<examples/writeScalar.pl>.
|
|
||||||
|
|
||||||
=head1 Reading from streams
|
|
||||||
|
|
||||||
B<Q:> How do I read from a stream (like for the Info-Zip C<funzip> program)?
|
|
||||||
|
|
||||||
B<A:> This is not currently supported, though writing to a stream is.
|
|
|
@ -1,64 +0,0 @@
|
||||||
package Archive::Zip::FileMember;
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
use vars qw( $VERSION @ISA );
|
|
||||||
|
|
||||||
BEGIN {
|
|
||||||
$VERSION = '1.48';
|
|
||||||
@ISA = qw ( Archive::Zip::Member );
|
|
||||||
}
|
|
||||||
|
|
||||||
use Archive::Zip qw(
|
|
||||||
:UTILITY_METHODS
|
|
||||||
);
|
|
||||||
|
|
||||||
sub externalFileName {
|
|
||||||
shift->{'externalFileName'};
|
|
||||||
}
|
|
||||||
|
|
||||||
# Return true if I depend on the named file
|
|
||||||
sub _usesFileNamed {
|
|
||||||
my $self = shift;
|
|
||||||
my $fileName = shift;
|
|
||||||
my $xfn = $self->externalFileName();
|
|
||||||
return undef if ref($xfn);
|
|
||||||
return $xfn eq $fileName;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub fh {
|
|
||||||
my $self = shift;
|
|
||||||
$self->_openFile()
|
|
||||||
if !defined($self->{'fh'}) || !$self->{'fh'}->opened();
|
|
||||||
return $self->{'fh'};
|
|
||||||
}
|
|
||||||
|
|
||||||
# opens my file handle from my file name
|
|
||||||
sub _openFile {
|
|
||||||
my $self = shift;
|
|
||||||
my ($status, $fh) = _newFileHandle($self->externalFileName(), 'r');
|
|
||||||
if (!$status) {
|
|
||||||
_ioError("Can't open", $self->externalFileName());
|
|
||||||
return undef;
|
|
||||||
}
|
|
||||||
$self->{'fh'} = $fh;
|
|
||||||
_binmode($fh);
|
|
||||||
return $fh;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Make sure I close my file handle
|
|
||||||
sub endRead {
|
|
||||||
my $self = shift;
|
|
||||||
undef $self->{'fh'}; # _closeFile();
|
|
||||||
return $self->SUPER::endRead(@_);
|
|
||||||
}
|
|
||||||
|
|
||||||
sub _become {
|
|
||||||
my $self = shift;
|
|
||||||
my $newClass = shift;
|
|
||||||
return $self if ref($self) eq $newClass;
|
|
||||||
delete($self->{'externalFileName'});
|
|
||||||
delete($self->{'fh'});
|
|
||||||
return $self->SUPER::_become($newClass);
|
|
||||||
}
|
|
||||||
|
|
||||||
1;
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,348 +0,0 @@
|
||||||
package Archive::Zip::MemberRead;
|
|
||||||
|
|
||||||
=head1 NAME
|
|
||||||
|
|
||||||
Archive::Zip::MemberRead - A wrapper that lets you read Zip archive members as if they were files.
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
=head1 SYNOPSIS
|
|
||||||
|
|
||||||
use Archive::Zip;
|
|
||||||
use Archive::Zip::MemberRead;
|
|
||||||
$zip = Archive::Zip->new("file.zip");
|
|
||||||
$fh = Archive::Zip::MemberRead->new($zip, "subdir/abc.txt");
|
|
||||||
while (defined($line = $fh->getline()))
|
|
||||||
{
|
|
||||||
print $fh->input_line_number . "#: $line\n";
|
|
||||||
}
|
|
||||||
|
|
||||||
$read = $fh->read($buffer, 32*1024);
|
|
||||||
print "Read $read bytes as :$buffer:\n";
|
|
||||||
|
|
||||||
=head1 DESCRIPTION
|
|
||||||
|
|
||||||
The Archive::Zip::MemberRead module lets you read Zip archive member data
|
|
||||||
just like you read data from files.
|
|
||||||
|
|
||||||
=head1 METHODS
|
|
||||||
|
|
||||||
=over 4
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
|
|
||||||
use Archive::Zip qw( :ERROR_CODES :CONSTANTS );
|
|
||||||
|
|
||||||
use vars qw{$VERSION};
|
|
||||||
|
|
||||||
my $nl;
|
|
||||||
|
|
||||||
BEGIN {
|
|
||||||
$VERSION = '1.48';
|
|
||||||
$VERSION = eval $VERSION;
|
|
||||||
|
|
||||||
# Requirement for newline conversion. Should check for e.g., DOS and OS/2 as well, but am too lazy.
|
|
||||||
$nl = $^O eq 'MSWin32' ? "\r\n" : "\n";
|
|
||||||
}
|
|
||||||
|
|
||||||
=item Archive::Zip::Member::readFileHandle()
|
|
||||||
|
|
||||||
You can get a C<Archive::Zip::MemberRead> from an archive member by
|
|
||||||
calling C<readFileHandle()>:
|
|
||||||
|
|
||||||
my $member = $zip->memberNamed('abc/def.c');
|
|
||||||
my $fh = $member->readFileHandle();
|
|
||||||
while (defined($line = $fh->getline()))
|
|
||||||
{
|
|
||||||
# ...
|
|
||||||
}
|
|
||||||
$fh->close();
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
sub Archive::Zip::Member::readFileHandle {
|
|
||||||
return Archive::Zip::MemberRead->new(shift());
|
|
||||||
}
|
|
||||||
|
|
||||||
=item Archive::Zip::MemberRead->new($zip, $fileName)
|
|
||||||
|
|
||||||
=item Archive::Zip::MemberRead->new($zip, $member)
|
|
||||||
|
|
||||||
=item Archive::Zip::MemberRead->new($member)
|
|
||||||
|
|
||||||
Construct a new Archive::Zip::MemberRead on the specified member.
|
|
||||||
|
|
||||||
my $fh = Archive::Zip::MemberRead->new($zip, 'fred.c')
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
sub new {
|
|
||||||
my ($class, $zip, $file) = @_;
|
|
||||||
my ($self, $member);
|
|
||||||
|
|
||||||
if ($zip && $file) # zip and filename, or zip and member
|
|
||||||
{
|
|
||||||
$member = ref($file) ? $file : $zip->memberNamed($file);
|
|
||||||
} elsif ($zip && !$file && ref($zip)) # just member
|
|
||||||
{
|
|
||||||
$member = $zip;
|
|
||||||
} else {
|
|
||||||
die(
|
|
||||||
'Archive::Zip::MemberRead::new needs a zip and filename, zip and member, or member'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
$self = {};
|
|
||||||
bless($self, $class);
|
|
||||||
$self->set_member($member);
|
|
||||||
return $self;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub set_member {
|
|
||||||
my ($self, $member) = @_;
|
|
||||||
|
|
||||||
$self->{member} = $member;
|
|
||||||
$self->set_compression(COMPRESSION_STORED);
|
|
||||||
$self->rewind();
|
|
||||||
}
|
|
||||||
|
|
||||||
sub set_compression {
|
|
||||||
my ($self, $compression) = @_;
|
|
||||||
$self->{member}->desiredCompressionMethod($compression) if $self->{member};
|
|
||||||
}
|
|
||||||
|
|
||||||
=item setLineEnd(expr)
|
|
||||||
|
|
||||||
Set the line end character to use. This is set to \n by default
|
|
||||||
except on Windows systems where it is set to \r\n. You will
|
|
||||||
only need to set this on systems which are not Windows or Unix
|
|
||||||
based and require a line end different from \n.
|
|
||||||
This is a class method so call as C<Archive::Zip::MemberRead>->C<setLineEnd($nl)>
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
sub setLineEnd {
|
|
||||||
shift;
|
|
||||||
$nl = shift;
|
|
||||||
}
|
|
||||||
|
|
||||||
=item rewind()
|
|
||||||
|
|
||||||
Rewinds an C<Archive::Zip::MemberRead> so that you can read from it again
|
|
||||||
starting at the beginning.
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
sub rewind {
|
|
||||||
my $self = shift;
|
|
||||||
|
|
||||||
$self->_reset_vars();
|
|
||||||
$self->{member}->rewindData() if $self->{member};
|
|
||||||
}
|
|
||||||
|
|
||||||
sub _reset_vars {
|
|
||||||
my $self = shift;
|
|
||||||
|
|
||||||
$self->{line_no} = 0;
|
|
||||||
$self->{at_end} = 0;
|
|
||||||
|
|
||||||
delete $self->{buffer};
|
|
||||||
}
|
|
||||||
|
|
||||||
=item input_record_separator(expr)
|
|
||||||
|
|
||||||
If the argument is given, input_record_separator for this
|
|
||||||
instance is set to it. The current setting (which may be
|
|
||||||
the global $/) is always returned.
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
sub input_record_separator {
|
|
||||||
my $self = shift;
|
|
||||||
if (@_) {
|
|
||||||
$self->{sep} = shift;
|
|
||||||
$self->{sep_re} =
|
|
||||||
_sep_as_re($self->{sep}); # Cache the RE as an optimization
|
|
||||||
}
|
|
||||||
return exists $self->{sep} ? $self->{sep} : $/;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Return the input_record_separator in use as an RE fragment
|
|
||||||
# Note that if we have a per-instance input_record_separator
|
|
||||||
# we can just return the already converted value. Otherwise,
|
|
||||||
# the conversion must be done on $/ every time since we cannot
|
|
||||||
# know whether it has changed or not.
|
|
||||||
sub _sep_re {
|
|
||||||
my $self = shift;
|
|
||||||
|
|
||||||
# Important to phrase this way: sep's value may be undef.
|
|
||||||
return exists $self->{sep} ? $self->{sep_re} : _sep_as_re($/);
|
|
||||||
}
|
|
||||||
|
|
||||||
# Convert the input record separator into an RE and return it.
|
|
||||||
sub _sep_as_re {
|
|
||||||
my $sep = shift;
|
|
||||||
if (defined $sep) {
|
|
||||||
if ($sep eq '') {
|
|
||||||
return "(?:$nl){2,}";
|
|
||||||
} else {
|
|
||||||
$sep =~ s/\n/$nl/og;
|
|
||||||
return quotemeta $sep;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return undef;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
=item input_line_number()
|
|
||||||
|
|
||||||
Returns the current line number, but only if you're using C<getline()>.
|
|
||||||
Using C<read()> will not update the line number.
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
sub input_line_number {
|
|
||||||
my $self = shift;
|
|
||||||
return $self->{line_no};
|
|
||||||
}
|
|
||||||
|
|
||||||
=item close()
|
|
||||||
|
|
||||||
Closes the given file handle.
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
sub close {
|
|
||||||
my $self = shift;
|
|
||||||
|
|
||||||
$self->_reset_vars();
|
|
||||||
$self->{member}->endRead();
|
|
||||||
}
|
|
||||||
|
|
||||||
=item buffer_size([ $size ])
|
|
||||||
|
|
||||||
Gets or sets the buffer size used for reads.
|
|
||||||
Default is the chunk size used by Archive::Zip.
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
sub buffer_size {
|
|
||||||
my ($self, $size) = @_;
|
|
||||||
|
|
||||||
if (!$size) {
|
|
||||||
return $self->{chunkSize} || Archive::Zip::chunkSize();
|
|
||||||
} else {
|
|
||||||
$self->{chunkSize} = $size;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
=item getline()
|
|
||||||
|
|
||||||
Returns the next line from the currently open member.
|
|
||||||
Makes sense only for text files.
|
|
||||||
A read error is considered fatal enough to die.
|
|
||||||
Returns undef on eof. All subsequent calls would return undef,
|
|
||||||
unless a rewind() is called.
|
|
||||||
Note: The line returned has the input_record_separator (default: newline) removed.
|
|
||||||
|
|
||||||
=item getline( { preserve_line_ending => 1 } )
|
|
||||||
|
|
||||||
Returns the next line including the line ending.
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
sub getline {
|
|
||||||
my ($self, $argref) = @_;
|
|
||||||
|
|
||||||
my $size = $self->buffer_size();
|
|
||||||
my $sep = $self->_sep_re();
|
|
||||||
|
|
||||||
my $preserve_line_ending;
|
|
||||||
if (ref $argref eq 'HASH') {
|
|
||||||
$preserve_line_ending = $argref->{'preserve_line_ending'};
|
|
||||||
$sep =~ s/\\([^A-Za-z_0-9])+/$1/g;
|
|
||||||
}
|
|
||||||
|
|
||||||
for (; ;) {
|
|
||||||
if ( $sep
|
|
||||||
&& defined($self->{buffer})
|
|
||||||
&& $self->{buffer} =~ s/^(.*?)$sep//s) {
|
|
||||||
my $line = $1;
|
|
||||||
$self->{line_no}++;
|
|
||||||
if ($preserve_line_ending) {
|
|
||||||
return $line . $sep;
|
|
||||||
} else {
|
|
||||||
return $line;
|
|
||||||
}
|
|
||||||
} elsif ($self->{at_end}) {
|
|
||||||
$self->{line_no}++ if $self->{buffer};
|
|
||||||
return delete $self->{buffer};
|
|
||||||
}
|
|
||||||
my ($temp, $status) = $self->{member}->readChunk($size);
|
|
||||||
if ($status != AZ_OK && $status != AZ_STREAM_END) {
|
|
||||||
die "ERROR: Error reading chunk from archive - $status";
|
|
||||||
}
|
|
||||||
$self->{at_end} = $status == AZ_STREAM_END;
|
|
||||||
$self->{buffer} .= $$temp;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
=item read($buffer, $num_bytes_to_read)
|
|
||||||
|
|
||||||
Simulates a normal C<read()> system call.
|
|
||||||
Returns the no. of bytes read. C<undef> on error, 0 on eof, I<e.g.>:
|
|
||||||
|
|
||||||
$fh = Archive::Zip::MemberRead->new($zip, "sreeji/secrets.bin");
|
|
||||||
while (1)
|
|
||||||
{
|
|
||||||
$read = $fh->read($buffer, 1024);
|
|
||||||
die "FATAL ERROR reading my secrets !\n" if (!defined($read));
|
|
||||||
last if (!$read);
|
|
||||||
# Do processing.
|
|
||||||
....
|
|
||||||
}
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
||||||
#
|
|
||||||
# All these $_ are required to emulate read().
|
|
||||||
#
|
|
||||||
sub read {
|
|
||||||
my $self = $_[0];
|
|
||||||
my $size = $_[2];
|
|
||||||
my ($temp, $status, $ret);
|
|
||||||
|
|
||||||
($temp, $status) = $self->{member}->readChunk($size);
|
|
||||||
if ($status != AZ_OK && $status != AZ_STREAM_END) {
|
|
||||||
$_[1] = undef;
|
|
||||||
$ret = undef;
|
|
||||||
} else {
|
|
||||||
$_[1] = $$temp;
|
|
||||||
$ret = length($$temp);
|
|
||||||
}
|
|
||||||
return $ret;
|
|
||||||
}
|
|
||||||
|
|
||||||
1;
|
|
||||||
|
|
||||||
=back
|
|
||||||
|
|
||||||
=head1 AUTHOR
|
|
||||||
|
|
||||||
Sreeji K. Das E<lt>sreeji_k@yahoo.comE<gt>
|
|
||||||
|
|
||||||
See L<Archive::Zip> by Ned Konz without which this module does not make
|
|
||||||
any sense!
|
|
||||||
|
|
||||||
Minor mods by Ned Konz.
|
|
||||||
|
|
||||||
=head1 COPYRIGHT
|
|
||||||
|
|
||||||
Copyright 2002 Sreeji K. Das.
|
|
||||||
|
|
||||||
This program is free software; you can redistribute it and/or modify it under
|
|
||||||
the same terms as Perl itself.
|
|
||||||
|
|
||||||
=cut
|
|
|
@ -1,69 +0,0 @@
|
||||||
package Archive::Zip::MockFileHandle;
|
|
||||||
|
|
||||||
# Output file handle that calls a custom write routine
|
|
||||||
# Ned Konz, March 2000
|
|
||||||
# This is provided to help with writing zip files
|
|
||||||
# when you have to process them a chunk at a time.
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
|
|
||||||
use vars qw{$VERSION};
|
|
||||||
|
|
||||||
BEGIN {
|
|
||||||
$VERSION = '1.48';
|
|
||||||
$VERSION = eval $VERSION;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub new {
|
|
||||||
my $class = shift || __PACKAGE__;
|
|
||||||
$class = ref($class) || $class;
|
|
||||||
my $self = bless(
|
|
||||||
{
|
|
||||||
'position' => 0,
|
|
||||||
'size' => 0
|
|
||||||
},
|
|
||||||
$class
|
|
||||||
);
|
|
||||||
return $self;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub eof {
|
|
||||||
my $self = shift;
|
|
||||||
return $self->{'position'} >= $self->{'size'};
|
|
||||||
}
|
|
||||||
|
|
||||||
# Copy given buffer to me
|
|
||||||
sub print {
|
|
||||||
my $self = shift;
|
|
||||||
my $bytes = join('', @_);
|
|
||||||
my $bytesWritten = $self->writeHook($bytes);
|
|
||||||
if ($self->{'position'} + $bytesWritten > $self->{'size'}) {
|
|
||||||
$self->{'size'} = $self->{'position'} + $bytesWritten;
|
|
||||||
}
|
|
||||||
$self->{'position'} += $bytesWritten;
|
|
||||||
return $bytesWritten;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Called on each write.
|
|
||||||
# Override in subclasses.
|
|
||||||
# Return number of bytes written (0 on error).
|
|
||||||
sub writeHook {
|
|
||||||
my $self = shift;
|
|
||||||
my $bytes = shift;
|
|
||||||
return length($bytes);
|
|
||||||
}
|
|
||||||
|
|
||||||
sub binmode { 1 }
|
|
||||||
|
|
||||||
sub close { 1 }
|
|
||||||
|
|
||||||
sub clearerr { 1 }
|
|
||||||
|
|
||||||
# I'm write-only!
|
|
||||||
sub read { 0 }
|
|
||||||
|
|
||||||
sub tell { return shift->{'position'} }
|
|
||||||
|
|
||||||
sub opened { 1 }
|
|
||||||
|
|
||||||
1;
|
|
|
@ -1,77 +0,0 @@
|
||||||
package Archive::Zip::NewFileMember;
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
use vars qw( $VERSION @ISA );
|
|
||||||
|
|
||||||
BEGIN {
|
|
||||||
$VERSION = '1.48';
|
|
||||||
@ISA = qw ( Archive::Zip::FileMember );
|
|
||||||
}
|
|
||||||
|
|
||||||
use Archive::Zip qw(
|
|
||||||
:CONSTANTS
|
|
||||||
:ERROR_CODES
|
|
||||||
:UTILITY_METHODS
|
|
||||||
);
|
|
||||||
|
|
||||||
# Given a file name, set up for eventual writing.
|
|
||||||
sub _newFromFileNamed {
|
|
||||||
my $class = shift;
|
|
||||||
my $fileName = shift; # local FS format
|
|
||||||
my $newName = shift;
|
|
||||||
$newName = _asZipDirName($fileName) unless defined($newName);
|
|
||||||
return undef unless (stat($fileName) && -r _ && !-d _ );
|
|
||||||
my $self = $class->new(@_);
|
|
||||||
$self->{'fileName'} = $newName;
|
|
||||||
$self->{'externalFileName'} = $fileName;
|
|
||||||
$self->{'compressionMethod'} = COMPRESSION_STORED;
|
|
||||||
my @stat = stat(_);
|
|
||||||
$self->{'compressedSize'} = $self->{'uncompressedSize'} = $stat[7];
|
|
||||||
$self->desiredCompressionMethod(
|
|
||||||
($self->compressedSize() > 0)
|
|
||||||
? COMPRESSION_DEFLATED
|
|
||||||
: COMPRESSION_STORED
|
|
||||||
);
|
|
||||||
$self->unixFileAttributes($stat[2]);
|
|
||||||
$self->setLastModFileDateTimeFromUnix($stat[9]);
|
|
||||||
$self->isTextFile(-T _ );
|
|
||||||
return $self;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub rewindData {
|
|
||||||
my $self = shift;
|
|
||||||
|
|
||||||
my $status = $self->SUPER::rewindData(@_);
|
|
||||||
return $status unless $status == AZ_OK;
|
|
||||||
|
|
||||||
return AZ_IO_ERROR unless $self->fh();
|
|
||||||
$self->fh()->clearerr();
|
|
||||||
$self->fh()->seek(0, IO::Seekable::SEEK_SET)
|
|
||||||
or return _ioError("rewinding", $self->externalFileName());
|
|
||||||
return AZ_OK;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Return bytes read. Note that first parameter is a ref to a buffer.
|
|
||||||
# my $data;
|
|
||||||
# my ( $bytesRead, $status) = $self->readRawChunk( \$data, $chunkSize );
|
|
||||||
sub _readRawChunk {
|
|
||||||
my ($self, $dataRef, $chunkSize) = @_;
|
|
||||||
return (0, AZ_OK) unless $chunkSize;
|
|
||||||
my $bytesRead = $self->fh()->read($$dataRef, $chunkSize)
|
|
||||||
or return (0, _ioError("reading data"));
|
|
||||||
return ($bytesRead, AZ_OK);
|
|
||||||
}
|
|
||||||
|
|
||||||
# If I already exist, extraction is a no-op.
|
|
||||||
sub extractToFileNamed {
|
|
||||||
my $self = shift;
|
|
||||||
my $name = shift; # local FS name
|
|
||||||
if (File::Spec->rel2abs($name) eq
|
|
||||||
File::Spec->rel2abs($self->externalFileName()) and -r $name) {
|
|
||||||
return AZ_OK;
|
|
||||||
} else {
|
|
||||||
return $self->SUPER::extractToFileNamed($name, @_);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
1;
|
|
|
@ -1,64 +0,0 @@
|
||||||
package Archive::Zip::StringMember;
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
use vars qw( $VERSION @ISA );
|
|
||||||
|
|
||||||
BEGIN {
|
|
||||||
$VERSION = '1.48';
|
|
||||||
@ISA = qw( Archive::Zip::Member );
|
|
||||||
}
|
|
||||||
|
|
||||||
use Archive::Zip qw(
|
|
||||||
:CONSTANTS
|
|
||||||
:ERROR_CODES
|
|
||||||
);
|
|
||||||
|
|
||||||
# Create a new string member. Default is COMPRESSION_STORED.
|
|
||||||
# Can take a ref to a string as well.
|
|
||||||
sub _newFromString {
|
|
||||||
my $class = shift;
|
|
||||||
my $string = shift;
|
|
||||||
my $name = shift;
|
|
||||||
my $self = $class->new(@_);
|
|
||||||
$self->contents($string);
|
|
||||||
$self->fileName($name) if defined($name);
|
|
||||||
|
|
||||||
# Set the file date to now
|
|
||||||
$self->setLastModFileDateTimeFromUnix(time());
|
|
||||||
$self->unixFileAttributes($self->DEFAULT_FILE_PERMISSIONS);
|
|
||||||
return $self;
|
|
||||||
}
|
|
||||||
|
|
||||||
sub _become {
|
|
||||||
my $self = shift;
|
|
||||||
my $newClass = shift;
|
|
||||||
return $self if ref($self) eq $newClass;
|
|
||||||
delete($self->{'contents'});
|
|
||||||
return $self->SUPER::_become($newClass);
|
|
||||||
}
|
|
||||||
|
|
||||||
# Get or set my contents. Note that we do not call the superclass
|
|
||||||
# version of this, because it calls us.
|
|
||||||
sub contents {
|
|
||||||
my $self = shift;
|
|
||||||
my $string = shift;
|
|
||||||
if (defined($string)) {
|
|
||||||
$self->{'contents'} =
|
|
||||||
pack('C0a*', (ref($string) eq 'SCALAR') ? $$string : $string);
|
|
||||||
$self->{'uncompressedSize'} = $self->{'compressedSize'} =
|
|
||||||
length($self->{'contents'});
|
|
||||||
$self->{'compressionMethod'} = COMPRESSION_STORED;
|
|
||||||
}
|
|
||||||
return $self->{'contents'};
|
|
||||||
}
|
|
||||||
|
|
||||||
# Return bytes read. Note that first parameter is a ref to a buffer.
|
|
||||||
# my $data;
|
|
||||||
# my ( $bytesRead, $status) = $self->readRawChunk( \$data, $chunkSize );
|
|
||||||
sub _readRawChunk {
|
|
||||||
my ($self, $dataRef, $chunkSize) = @_;
|
|
||||||
$$dataRef = substr($self->contents(), $self->_readOffset(), $chunkSize);
|
|
||||||
return (length($$dataRef), AZ_OK);
|
|
||||||
}
|
|
||||||
|
|
||||||
1;
|
|
|
@ -1,48 +0,0 @@
|
||||||
package Archive::Zip::Tree;
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
use vars qw{$VERSION};
|
|
||||||
|
|
||||||
BEGIN {
|
|
||||||
$VERSION = '1.48';
|
|
||||||
}
|
|
||||||
|
|
||||||
use Archive::Zip;
|
|
||||||
|
|
||||||
warn(
|
|
||||||
"Archive::Zip::Tree is deprecated; its methods have been moved into Archive::Zip."
|
|
||||||
) if $^W;
|
|
||||||
|
|
||||||
1;
|
|
||||||
|
|
||||||
__END__
|
|
||||||
|
|
||||||
=head1 NAME
|
|
||||||
|
|
||||||
Archive::Zip::Tree - (DEPRECATED) methods for adding/extracting trees using Archive::Zip
|
|
||||||
|
|
||||||
=head1 DESCRIPTION
|
|
||||||
|
|
||||||
This module is deprecated, because all its methods were moved into the main
|
|
||||||
Archive::Zip module.
|
|
||||||
|
|
||||||
It is included in the distribution merely to avoid breaking old code.
|
|
||||||
|
|
||||||
See L<Archive::Zip>.
|
|
||||||
|
|
||||||
=head1 AUTHOR
|
|
||||||
|
|
||||||
Ned Konz, perl@bike-nomad.com
|
|
||||||
|
|
||||||
=head1 COPYRIGHT
|
|
||||||
|
|
||||||
Copyright (c) 2000-2002 Ned Konz. All rights reserved. This program is free
|
|
||||||
software; you can redistribute it and/or modify it under the same terms
|
|
||||||
as Perl itself.
|
|
||||||
|
|
||||||
=head1 SEE ALSO
|
|
||||||
|
|
||||||
L<Archive::Zip>
|
|
||||||
|
|
||||||
=cut
|
|
||||||
|
|
|
@ -1,416 +0,0 @@
|
||||||
package Archive::Zip::ZipFileMember;
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
use vars qw( $VERSION @ISA );
|
|
||||||
|
|
||||||
BEGIN {
|
|
||||||
$VERSION = '1.48';
|
|
||||||
@ISA = qw ( Archive::Zip::FileMember );
|
|
||||||
}
|
|
use Archive::Zip qw(
    :CONSTANTS
    :ERROR_CODES
    :PKZIP_CONSTANTS
    :UTILITY_METHODS
);

# Create a new Archive::Zip::ZipFileMember
# given a filename and optional open file handle
#
sub _newFromZipFile {
    my $class              = shift;
    my $fh                 = shift;
    my $externalFileName   = shift;
    my $possibleEocdOffset = shift;    # normally 0

    my $self = $class->new(
        'crc32'                     => 0,
        'diskNumberStart'           => 0,
        'localHeaderRelativeOffset' => 0,
        'dataOffset'                => 0,    # localHeaderRelativeOffset + header length
        @_
    );
    $self->{'externalFileName'}   = $externalFileName;
    $self->{'fh'}                 = $fh;
    $self->{'possibleEocdOffset'} = $possibleEocdOffset;
    return $self;
}

sub isDirectory {
    my $self = shift;
    return (substr($self->fileName, -1, 1) eq '/'
          and $self->uncompressedSize == 0);
}

# Seek to the beginning of the local header, just past the signature.
# Verify that the local header signature is in fact correct.
# Update the localHeaderRelativeOffset if necessary by adding the possibleEocdOffset.
# Returns status.

sub _seekToLocalHeader {
    my $self          = shift;
    my $where         = shift;    # optional
    my $previousWhere = shift;    # optional

    $where = $self->localHeaderRelativeOffset() unless defined($where);

    # avoid loop on certain corrupt files (from Julian Field)
    return _formatError("corrupt zip file")
      if defined($previousWhere) && $where == $previousWhere;

    my $status;
    my $signature;

    $status = $self->fh()->seek($where, IO::Seekable::SEEK_SET);
    return _ioError("seeking to local header") unless $status;

    ($status, $signature) =
      _readSignature($self->fh(), $self->externalFileName(),
        LOCAL_FILE_HEADER_SIGNATURE);
    return $status if $status == AZ_IO_ERROR;

    # retry with EOCD offset if any was given.
    if ($status == AZ_FORMAT_ERROR && $self->{'possibleEocdOffset'}) {
        $status = $self->_seekToLocalHeader(
            $self->localHeaderRelativeOffset() + $self->{'possibleEocdOffset'},
            $where
        );
        if ($status == AZ_OK) {
            $self->{'localHeaderRelativeOffset'} +=
              $self->{'possibleEocdOffset'};
            $self->{'possibleEocdOffset'} = 0;
        }
    }

    return $status;
}

# Because I'm going to delete the file handle, read the local file
# header if the file handle is seekable. If it is not, I assume that
# I've already read the local header.
# Return ( $status, $self )

sub _become {
    my $self     = shift;
    my $newClass = shift;
    return $self if ref($self) eq $newClass;

    my $status = AZ_OK;

    if (_isSeekable($self->fh())) {
        my $here = $self->fh()->tell();
        $status = $self->_seekToLocalHeader();
        $status = $self->_readLocalFileHeader() if $status == AZ_OK;
        $self->fh()->seek($here, IO::Seekable::SEEK_SET);
        return $status unless $status == AZ_OK;
    }

    delete($self->{'eocdCrc32'});
    delete($self->{'diskNumberStart'});
    delete($self->{'localHeaderRelativeOffset'});
    delete($self->{'dataOffset'});

    return $self->SUPER::_become($newClass);
}

sub diskNumberStart {
    shift->{'diskNumberStart'};
}

sub localHeaderRelativeOffset {
    shift->{'localHeaderRelativeOffset'};
}

sub dataOffset {
    shift->{'dataOffset'};
}

# Skip local file header, updating only extra field stuff.
# Assumes that fh is positioned before signature.
sub _skipLocalFileHeader {
    my $self = shift;
    my $header;
    my $bytesRead = $self->fh()->read($header, LOCAL_FILE_HEADER_LENGTH);
    if ($bytesRead != LOCAL_FILE_HEADER_LENGTH) {
        return _ioError("reading local file header");
    }
    my $fileNameLength;
    my $extraFieldLength;
    my $bitFlag;
    (
        undef,    # $self->{'versionNeededToExtract'},
        $bitFlag,
        undef,    # $self->{'compressionMethod'},
        undef,    # $self->{'lastModFileDateTime'},
        undef,    # $crc32,
        undef,    # $compressedSize,
        undef,    # $uncompressedSize,
        $fileNameLength,
        $extraFieldLength
    ) = unpack(LOCAL_FILE_HEADER_FORMAT, $header);

    if ($fileNameLength) {
        $self->fh()->seek($fileNameLength, IO::Seekable::SEEK_CUR)
          or return _ioError("skipping local file name");
    }

    if ($extraFieldLength) {
        $bytesRead =
          $self->fh()->read($self->{'localExtraField'}, $extraFieldLength);
        if ($bytesRead != $extraFieldLength) {
            return _ioError("reading local extra field");
        }
    }

    $self->{'dataOffset'} = $self->fh()->tell();

    if ($bitFlag & GPBF_HAS_DATA_DESCRIPTOR_MASK) {

        # Read the crc32, compressedSize, and uncompressedSize from the
        # extended data descriptor, which directly follows the compressed data.
        #
        # Skip over the compressed file data (assumes that EOCD compressedSize
        # was correct)
        $self->fh()->seek($self->{'compressedSize'}, IO::Seekable::SEEK_CUR)
          or return _ioError("seeking to extended local header");

        # these values should be set correctly from before.
        my $oldCrc32            = $self->{'eocdCrc32'};
        my $oldCompressedSize   = $self->{'compressedSize'};
        my $oldUncompressedSize = $self->{'uncompressedSize'};

        my $status = $self->_readDataDescriptor();
        return $status unless $status == AZ_OK;

        # The buffer withe encrypted data is prefixed with a new
        # encrypted 12 byte header. The size only changes when
        # the buffer is also compressed
        $self->isEncrypted && $oldUncompressedSize > $self->{uncompressedSize}
          and $oldUncompressedSize -= DATA_DESCRIPTOR_LENGTH;

        return _formatError(
            "CRC or size mismatch while skipping data descriptor")
          if ( $oldCrc32 != $self->{'crc32'}
            || $oldUncompressedSize != $self->{'uncompressedSize'});

        $self->{'crc32'} = 0
          if $self->compressionMethod() == COMPRESSION_STORED;
    }

    return AZ_OK;
}

# Read from a local file header into myself. Returns AZ_OK if successful.
# Assumes that fh is positioned after signature.
# Note that crc32, compressedSize, and uncompressedSize will be 0 if
# GPBF_HAS_DATA_DESCRIPTOR_MASK is set in the bitFlag.

sub _readLocalFileHeader {
    my $self = shift;
    my $header;
    my $bytesRead = $self->fh()->read($header, LOCAL_FILE_HEADER_LENGTH);
    if ($bytesRead != LOCAL_FILE_HEADER_LENGTH) {
        return _ioError("reading local file header");
    }
    my $fileNameLength;
    my $crc32;
    my $compressedSize;
    my $uncompressedSize;
    my $extraFieldLength;
    (
        $self->{'versionNeededToExtract'}, $self->{'bitFlag'},
        $self->{'compressionMethod'},      $self->{'lastModFileDateTime'},
        $crc32,                            $compressedSize,
        $uncompressedSize,                 $fileNameLength,
        $extraFieldLength
    ) = unpack(LOCAL_FILE_HEADER_FORMAT, $header);

    if ($fileNameLength) {
        my $fileName;
        $bytesRead = $self->fh()->read($fileName, $fileNameLength);
        if ($bytesRead != $fileNameLength) {
            return _ioError("reading local file name");
        }
        $self->fileName($fileName);
    }

    if ($extraFieldLength) {
        $bytesRead =
          $self->fh()->read($self->{'localExtraField'}, $extraFieldLength);
        if ($bytesRead != $extraFieldLength) {
            return _ioError("reading local extra field");
        }
    }

    $self->{'dataOffset'} = $self->fh()->tell();

    if ($self->hasDataDescriptor()) {

        # Read the crc32, compressedSize, and uncompressedSize from the
        # extended data descriptor.
        # Skip over the compressed file data (assumes that EOCD compressedSize
        # was correct)
        $self->fh()->seek($self->{'compressedSize'}, IO::Seekable::SEEK_CUR)
          or return _ioError("seeking to extended local header");

        my $status = $self->_readDataDescriptor();
        return $status unless $status == AZ_OK;
    } else {
        return _formatError(
            "CRC or size mismatch after reading data descriptor")
          if ( $self->{'crc32'} != $crc32
            || $self->{'uncompressedSize'} != $uncompressedSize);
    }

    return AZ_OK;
}

# This will read the data descriptor, which is after the end of compressed file
# data in members that have GPBF_HAS_DATA_DESCRIPTOR_MASK set in their bitFlag.
# The only reliable way to find these is to rely on the EOCD compressedSize.
# Assumes that file is positioned immediately after the compressed data.
# Returns status; sets crc32, compressedSize, and uncompressedSize.
sub _readDataDescriptor {
    my $self = shift;
    my $signatureData;
    my $header;
    my $crc32;
    my $compressedSize;
    my $uncompressedSize;

    my $bytesRead = $self->fh()->read($signatureData, SIGNATURE_LENGTH);
    return _ioError("reading header signature")
      if $bytesRead != SIGNATURE_LENGTH;
    my $signature = unpack(SIGNATURE_FORMAT, $signatureData);

    # unfortunately, the signature appears to be optional.
    if ($signature == DATA_DESCRIPTOR_SIGNATURE
        && ($signature != $self->{'crc32'})) {
        $bytesRead = $self->fh()->read($header, DATA_DESCRIPTOR_LENGTH);
        return _ioError("reading data descriptor")
          if $bytesRead != DATA_DESCRIPTOR_LENGTH;

        ($crc32, $compressedSize, $uncompressedSize) =
          unpack(DATA_DESCRIPTOR_FORMAT, $header);
    } else {
        $bytesRead = $self->fh()->read($header, DATA_DESCRIPTOR_LENGTH_NO_SIG);
        return _ioError("reading data descriptor")
          if $bytesRead != DATA_DESCRIPTOR_LENGTH_NO_SIG;

        $crc32 = $signature;
        ($compressedSize, $uncompressedSize) =
          unpack(DATA_DESCRIPTOR_FORMAT_NO_SIG, $header);
    }

    $self->{'eocdCrc32'} = $self->{'crc32'}
      unless defined($self->{'eocdCrc32'});
    $self->{'crc32'}            = $crc32;
    $self->{'compressedSize'}   = $compressedSize;
    $self->{'uncompressedSize'} = $uncompressedSize;

    return AZ_OK;
}

# Read a Central Directory header. Return AZ_OK on success.
# Assumes that fh is positioned right after the signature.

sub _readCentralDirectoryFileHeader {
    my $self   = shift;
    my $fh     = $self->fh();
    my $header = '';
    my $bytesRead = $fh->read($header, CENTRAL_DIRECTORY_FILE_HEADER_LENGTH);
    if ($bytesRead != CENTRAL_DIRECTORY_FILE_HEADER_LENGTH) {
        return _ioError("reading central dir header");
    }
    my ($fileNameLength, $extraFieldLength, $fileCommentLength);
    (
        $self->{'versionMadeBy'},
        $self->{'fileAttributeFormat'},
        $self->{'versionNeededToExtract'},
        $self->{'bitFlag'},
        $self->{'compressionMethod'},
        $self->{'lastModFileDateTime'},
        $self->{'crc32'},
        $self->{'compressedSize'},
        $self->{'uncompressedSize'},
        $fileNameLength,
        $extraFieldLength,
        $fileCommentLength,
        $self->{'diskNumberStart'},
        $self->{'internalFileAttributes'},
        $self->{'externalFileAttributes'},
        $self->{'localHeaderRelativeOffset'}
    ) = unpack(CENTRAL_DIRECTORY_FILE_HEADER_FORMAT, $header);

    $self->{'eocdCrc32'} = $self->{'crc32'};

    if ($fileNameLength) {
        $bytesRead = $fh->read($self->{'fileName'}, $fileNameLength);
        if ($bytesRead != $fileNameLength) {
            _ioError("reading central dir filename");
        }
    }
    if ($extraFieldLength) {
        $bytesRead = $fh->read($self->{'cdExtraField'}, $extraFieldLength);
        if ($bytesRead != $extraFieldLength) {
            return _ioError("reading central dir extra field");
        }
    }
    if ($fileCommentLength) {
        $bytesRead = $fh->read($self->{'fileComment'}, $fileCommentLength);
        if ($bytesRead != $fileCommentLength) {
            return _ioError("reading central dir file comment");
        }
    }

    # NK 10/21/04: added to avoid problems with manipulated headers
    if (    $self->{'uncompressedSize'} != $self->{'compressedSize'}
        and $self->{'compressionMethod'} == COMPRESSION_STORED) {
        $self->{'uncompressedSize'} = $self->{'compressedSize'};
    }

    $self->desiredCompressionMethod($self->compressionMethod());

    return AZ_OK;
}

sub rewindData {
    my $self = shift;

    my $status = $self->SUPER::rewindData(@_);
    return $status unless $status == AZ_OK;

    return AZ_IO_ERROR unless $self->fh();

    $self->fh()->clearerr();

    # Seek to local file header.
    # The only reason that I'm doing this this way is that the extraField
    # length seems to be different between the CD header and the LF header.
    $status = $self->_seekToLocalHeader();
    return $status unless $status == AZ_OK;

    # skip local file header
    $status = $self->_skipLocalFileHeader();
    return $status unless $status == AZ_OK;

    # Seek to beginning of file data
    $self->fh()->seek($self->dataOffset(), IO::Seekable::SEEK_SET)
      or return _ioError("seeking to beginning of file data");

    return AZ_OK;
}

# Return bytes read. Note that first parameter is a ref to a buffer.
# my $data;
# my ( $bytesRead, $status) = $self->readRawChunk( \$data, $chunkSize );
sub _readRawChunk {
    my ($self, $dataRef, $chunkSize) = @_;
    return (0, AZ_OK) unless $chunkSize;
    my $bytesRead = $self->fh()->read($$dataRef, $chunkSize)
      or return (0, _ioError("reading data"));
    return ($bytesRead, AZ_OK);
}

1;
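For context, here is a minimal sketch of how the bundled Archive::Zip public API can be driven to list the jar members of a distribution zip and print a SHA-1 per jar, which is the kind of check these dev-tools supported. The input path is hypothetical and this is not the removed script itself, only an illustrative assumption.

#!/usr/bin/env perl
# Sketch (assumed usage, not part of the removed files): read a zip with
# Archive::Zip, walk its .jar members, and print a SHA-1 digest per jar.
use strict;
use warnings;
use Archive::Zip qw(:ERROR_CODES);
use Digest::SHA qw(sha1_hex);

my $zip = Archive::Zip->new();
$zip->read('example-distribution.zip') == AZ_OK    # hypothetical input path
    or die "cannot read zip";

for my $member ($zip->membersMatching('\.jar$')) {
    next if $member->isDirectory();
    my $contents = $member->contents();            # raw bytes of the jar member
    printf "%s  %s\n", sha1_hex($contents), $member->fileName();
}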
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -1,119 +0,0 @@
package parent;
use strict;
use vars qw($VERSION);
$VERSION = '0.234';

sub import {
    my $class = shift;

    my $inheritor = caller(0);

    if ( @_ and $_[0] eq '-norequire' ) {
        shift @_;
    } else {
        for ( my @filename = @_ ) {
            s{::|'}{/}g;
            require "$_.pm"; # dies if the file is not found
        }
    }

    {
        no strict 'refs';
        push @{"$inheritor\::ISA"}, @_;
    };
};

"All your base are belong to us"

__END__

=encoding utf8

=head1 NAME

parent - Establish an ISA relationship with base classes at compile time

=head1 SYNOPSIS

    package Baz;
    use parent qw(Foo Bar);

=head1 DESCRIPTION

Allows you to both load one or more modules, while setting up inheritance from
those modules at the same time. Mostly similar in effect to

    package Baz;
    BEGIN {
        require Foo;
        require Bar;
        push @ISA, qw(Foo Bar);
    }

By default, every base class needs to live in a file of its own.
If you want to have a subclass and its parent class in the same file, you
can tell C<parent> not to load any modules by using the C<-norequire> switch:

  package Foo;
  sub exclaim { "I CAN HAS PERL" }

  package DoesNotLoadFooBar;
  use parent -norequire, 'Foo', 'Bar';
  # will not go looking for Foo.pm or Bar.pm

This is equivalent to the following code:

  package Foo;
  sub exclaim { "I CAN HAS PERL" }

  package DoesNotLoadFooBar;
  push @DoesNotLoadFooBar::ISA, 'Foo', 'Bar';

This is also helpful for the case where a package lives within
a differently named file:

  package MyHash;
  use Tie::Hash;
  use parent -norequire, 'Tie::StdHash';

This is equivalent to the following code:

  package MyHash;
  require Tie::Hash;
  push @ISA, 'Tie::StdHash';

If you want to load a subclass from a file that C<require> would
not consider an eligible filename (that is, it does not end in
either C<.pm> or C<.pmc>), use the following code:

  package MySecondPlugin;
  require './plugins/custom.plugin'; # contains Plugin::Custom
  use parent -norequire, 'Plugin::Custom';

=head1 HISTORY

This module was forked from L<base> to remove the cruft
that had accumulated in it.

=head1 CAVEATS

=head1 SEE ALSO

L<base>

=head1 AUTHORS AND CONTRIBUTORS

Rafaël Garcia-Suarez, Bart Lateur, Max Maischein, Anno Siegel, Michael Schwern

=head1 MAINTAINER

Max Maischein C< corion@cpan.org >

Copyright (c) 2007-10 Max Maischein C<< <corion@cpan.org> >>
Based on the idea of C<base.pm>, which was introduced with Perl 5.004_04.

=head1 LICENSE

This module is released under the same terms as Perl itself.

=cut
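The POD above already shows the basic forms; as a quick sanity check, here is a small self-contained sketch (package names are hypothetical) of what the -norequire form does to the caller's @ISA.

#!/usr/bin/env perl
# Sketch (hypothetical packages): 'use parent -norequire' pushes the listed
# classes onto the caller's @ISA, so Dog inherits greet() from Animal.
use strict;
use warnings;

package Animal;
sub new   { my $class = shift; return bless {}, $class }
sub greet { return "generic animal noise" }

package Dog;
use parent -norequire, 'Animal';    # same effect as: push @Dog::ISA, 'Animal';

package main;
my $dog = Dog->new();
print $dog->greet(), "\n";          # prints "generic animal noise"
print "@Dog::ISA\n";                # prints "Animal"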
@ -1,80 +0,0 @@
# Elasticsearch plugin descriptor file
# This file must exist as 'plugin-descriptor.properties' at
# the root directory of all plugins.
#
# A plugin can be 'site', 'jvm', or both.
#
### example site plugin for "foo":
#
# foo.zip <-- zip file for the plugin, with this structure:
#   _site/ <-- the contents that will be served
#   plugin-descriptor.properties <-- example contents below:
#
# site=true
# description=My cool plugin
# version=1.0
#
### example jvm plugin for "foo"
#
# foo.zip <-- zip file for the plugin, with this structure:
#   <arbitrary name1>.jar <-- classes, resources, dependencies
#   <arbitrary nameN>.jar <-- any number of jars
#   plugin-descriptor.properties <-- example contents below:
#
# jvm=true
# classname=foo.bar.BazPlugin
# description=My cool plugin
# version=2.0.0-rc1
# elasticsearch.version=2.0
# java.version=1.7
#
### mandatory elements for all plugins:
#
# 'description': simple summary of the plugin
description=${project.description}
#
# 'version': plugin's version
version=${project.version}
#
# 'name': the plugin name
name=${elasticsearch.plugin.name}

### mandatory elements for site plugins:
#
# 'site': set to true to indicate contents of the _site/
#  directory in the root of the plugin should be served.
site=${elasticsearch.plugin.site}
#
### mandatory elements for jvm plugins :
#
# 'jvm': true if the 'classname' class should be loaded
# from jar files in the root directory of the plugin.
# Note that only jar files in the root directory are
# added to the classpath for the plugin! If you need
# other resources, package them into a resources jar.
jvm=${elasticsearch.plugin.jvm}
#
# 'classname': the name of the class to load, fully-qualified.
classname=${elasticsearch.plugin.classname}
#
# 'java.version' version of java the code is built against
# use the system property java.specification.version
# version string must be a sequence of nonnegative decimal integers
# separated by "."'s and may have leading zeros
java.version=${maven.compiler.target}
#
# 'elasticsearch.version' version of elasticsearch compiled against
# You will have to release a new version of the plugin for each new
# elasticsearch release. This version is checked when the plugin
# is loaded so Elasticsearch will refuse to start in the presence of
# plugins with the incorrect elasticsearch.version.
elasticsearch.version=${elasticsearch.version}
#
### deprecated elements for jvm plugins :
#
# 'isolated': true if the plugin should have its own classloader.
# passing false is deprecated, and only intended to support plugins
# that have hard dependencies against each other. If this is
# not specified, then the plugin is isolated by default.
isolated=${elasticsearch.plugin.isolated}
#
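As a rough illustration of the "mandatory elements" the template above describes, here is a small sketch that parses a filled-in descriptor and checks the keys required for all plugins. The file name and checks are illustrative assumptions; this is not how Elasticsearch itself validates plugins.

#!/usr/bin/env perl
# Sketch (illustrative only): parse plugin-descriptor.properties key=value
# pairs and verify the properties the template marks as mandatory.
use strict;
use warnings;

my $file = shift @ARGV || 'plugin-descriptor.properties';
open my $fh, '<', $file or die "cannot open $file: $!";

my %props;
while (my $line = <$fh>) {
    chomp $line;
    next if $line =~ /^\s*(#|$)/;          # skip comments and blank lines
    my ($key, $value) = split /=/, $line, 2;
    $props{$key} = $value;
}
close $fh;

for my $required (qw(description version name)) {
    die "missing mandatory property: $required\n"
        unless defined $props{$required} && length $props{$required};
}
print "descriptor looks complete: $props{name} $props{version}\n";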
@ -1,9 +0,0 @@
es.logger.level=INFO
log4j.rootLogger=${es.logger.level}, out

log4j.logger.org.apache.http=INFO, out
log4j.additivity.org.apache.http=false

log4j.appender.out=org.apache.log4j.ConsoleAppender
log4j.appender.out.layout=org.apache.log4j.PatternLayout
log4j.appender.out.layout.conversionPattern=[%d{ISO8601}][%-5p][%-25c] %m%n
@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/DECORATION/1.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/DECORATION/1.0.0 http://maven.apache.org/xsd/decoration-1.0.0.xsd">
    <body>
    </body>
</project>
@ -1,16 +0,0 @@
#!/bin/sh
gradle assemble
perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \
    --update distribution/licenses/ distribution/zip/build/distributions/elasticsearch-3.0.0-SNAPSHOT.zip elasticsearch-3.0.0-SNAPSHOT
perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \
    --update plugins/analysis-icu/licenses/ plugins/analysis-icu/build/distributions/analysis-icu-3.0.0-SNAPSHOT.zip analysis-icu-3.0.0-SNAPSHOT
perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \
    --update plugins/analysis-kuromoji/licenses/ plugins/analysis-kuromoji/build/distributions/analysis-kuromoji-3.0.0-SNAPSHOT.zip analysis-kuromoji-3.0.0-SNAPSHOT
perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \
    --update plugins/analysis-phonetic/licenses/ plugins/analysis-phonetic/build/distributions/analysis-phonetic-3.0.0-SNAPSHOT.zip analysis-phonetic-3.0.0-SNAPSHOT
perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \
    --update plugins/analysis-smartcn/licenses/ plugins/analysis-smartcn/build/distributions/analysis-smartcn-3.0.0-SNAPSHOT.zip analysis-smartcn-3.0.0-SNAPSHOT
perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \
    --update plugins/analysis-stempel/licenses/ plugins/analysis-stempel/build/distributions/analysis-stempel-3.0.0-SNAPSHOT.zip analysis-stempel-3.0.0-SNAPSHOT
perl dev-tools/src/main/resources/license-check/check_license_and_sha.pl \
    --update plugins/lang-expression/licenses/ plugins/lang-expression/build/distributions/lang-expression-3.0.0-SNAPSHOT.zip lang-expression-3.0.0-SNAPSHOT