using more java.nio.file

Olivier Lamy 2014-04-17 09:57:30 +10:00
parent b2ebfd6cc2
commit bb30140719
5 changed files with 45 additions and 36 deletions

View File

@@ -43,6 +43,8 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -259,7 +261,7 @@ public class ArchivaCli
Properties p = new Properties();
-try (FileInputStream fis = new FileInputStream( properties ))
+try (InputStream fis = Files.newInputStream( Paths.get(properties)))
{
p.load( fis );
}
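
For reference, the pattern this hunk switches to, as a minimal standalone sketch (the helper name and properties path are illustrative, not taken from ArchivaCli):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Properties;

// Load a properties file through java.nio.file instead of FileInputStream;
// try-with-resources closes the stream whether or not load() throws.
static Properties loadProperties( String path ) throws IOException
{
    Properties p = new Properties();
    try ( InputStream in = Files.newInputStream( Paths.get( path ) ) )
    {
        p.load( in );
    }
    return p;
}

One behavioural note: for a missing file, Files.newInputStream throws java.nio.file.NoSuchFileException rather than java.io.FileNotFoundException (both are IOException subclasses), which matters wherever a caller catches FileNotFoundException specifically.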

View File

@@ -27,6 +27,8 @@ import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
@@ -75,7 +77,7 @@ public class ChecksummedFile
throws IOException
{
-try (FileInputStream fis = new FileInputStream( referenceFile ))
+try (InputStream fis = Files.newInputStream( referenceFile.toPath() ) )
{
Checksum checksum = new Checksum( checksumAlgorithm );
checksum.update( fis );
@@ -139,7 +141,7 @@ public class ChecksummedFile
public boolean isValidChecksums( ChecksumAlgorithm algorithms[] )
{
-try (FileInputStream fis = new FileInputStream( referenceFile ))
+try (InputStream fis = Files.newInputStream( referenceFile.toPath() ))
{
List<Checksum> checksums = new ArrayList<>( algorithms.length );
// Create checksum object for each algorithm.
@@ -228,7 +230,7 @@ public class ChecksummedFile
}
-try (FileInputStream fis = new FileInputStream( referenceFile ))
+try (InputStream fis = Files.newInputStream( referenceFile.toPath() ))
{
// Parse file once, for all checksums.
Checksum.update( checksums, fis );
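
The three ChecksummedFile hunks above repeat the same Files.newInputStream( referenceFile.toPath() ) idiom. A self-contained sketch of that idiom, with the JDK's MessageDigest standing in for Archiva's Checksum class:

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Stream a file through a digest without ever materialising a FileInputStream.
static byte[] digest( Path file, String algorithm ) throws IOException, NoSuchAlgorithmException
{
    MessageDigest md = MessageDigest.getInstance( algorithm ); // e.g. "MD5" or "SHA-1"
    try ( InputStream in = Files.newInputStream( file ) )
    {
        byte[] buffer = new byte[8192];
        int read;
        while ( ( read = in.read( buffer ) ) != -1 )
        {
            md.update( buffer, 0, read );
        }
    }
    return md.digest();
}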

View File

@@ -22,6 +22,7 @@ package org.apache.archiva.configuration;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
+import java.nio.file.Files;
import java.util.Map;
import java.util.Properties;
import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
@@ -48,7 +49,7 @@ public class MavenProxyPropertyLoaderTest
proxy.setHost( "original-host" );
configuration.addNetworkProxy( proxy ); // overwritten
-loader.load( new FileInputStream( confFile ), configuration );
+loader.load( Files.newInputStream(confFile.toPath()), configuration );
Map<String, ManagedRepositoryConfiguration> repositoryIdMap = configuration.getManagedRepositoriesAsMap();
assertEquals( "Count repositories", 1, repositoryIdMap.size() );

View File

@@ -56,6 +56,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
+import java.nio.file.Files;
import java.util.List;
import java.util.Map;
@@ -135,7 +136,8 @@ public class DownloadRemoteIndexTask
final StreamWagon wagon = (StreamWagon) wagonFactory.getWagon(
new WagonFactoryRequest( wagonProtocol, this.remoteRepository.getExtraHeaders() ).networkProxy(
-this.networkProxy ) );
+this.networkProxy )
+);
// FIXME olamy having 2 config values
wagon.setReadTimeout( remoteRepository.getRemoteDownloadTimeout() * 1000 );
wagon.setTimeout( remoteRepository.getTimeout() * 1000 );
@@ -345,13 +347,10 @@ public class DownloadRemoteIndexTask
{
log.info( "index update retrieve file, name:{}", name );
File file = new File( tempIndexDirectory, name );
-if ( file.exists() )
-{
-file.delete();
-}
+Files.deleteIfExists( file.toPath() );
file.deleteOnExit();
wagon.get( addParameters( name, this.remoteRepository ), file );
-return new FileInputStream( file );
+return Files.newInputStream( file.toPath() );
}
catch ( AuthorizationException e )
{
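
A sketch of the shape this retrieve step takes after the change (the method name is illustrative, and the wagon transfer is elided):

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;

// Clear any stale temp file in one call instead of exists()/delete(),
// then hand back an NIO stream instead of new FileInputStream( file ).
static InputStream openFreshTempFile( File tempIndexDirectory, String name ) throws IOException
{
    File file = new File( tempIndexDirectory, name );
    Files.deleteIfExists( file.toPath() );
    file.deleteOnExit();
    // ... wagon.get( ..., file ) downloads into 'file' at this point in the real task ...
    return Files.newInputStream( file.toPath() );
}

Unlike File.delete(), Files.deleteIfExists throws an IOException when the file exists but cannot be removed, so a failed cleanup is no longer silent.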

View File

@@ -47,10 +47,12 @@ import javax.inject.Inject;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
@@ -156,7 +158,8 @@ public class ArchivaIndexingTaskExecutorTest
new File( repositoryConfig.getLocation() ),
new File( repositoryConfig.getLocation(),
".indexer" ), null, null,
-mavenIndexerUtils.getAllIndexCreators() );
+mavenIndexerUtils.getAllIndexCreators()
+);
context.setSearchable( true );
}
@@ -257,8 +260,8 @@ public class ArchivaIndexingTaskExecutorTest
q.add( indexer.constructQuery( MAVEN.GROUP_ID, new SourcedSearchExpression( "org.apache.archiva" ) ),
Occur.SHOULD );
q.add( indexer.constructQuery( MAVEN.ARTIFACT_ID,
-new SourcedSearchExpression( "archiva-index-methods-jar-test" ) ),
-Occur.SHOULD );
+new SourcedSearchExpression( "archiva-index-methods-jar-test" ) ), Occur.SHOULD
+);
assertTrue( new File( repositoryConfig.getLocation(), ".indexer" ).exists() );
assertFalse( new File( repositoryConfig.getLocation(), ".index" ).exists() );
@@ -345,10 +348,10 @@ public class ArchivaIndexingTaskExecutorTest
{
final int buff = 2048;
-new File( destDir ).mkdirs();
+Files.createDirectories( Paths.get( destDir ) );
-BufferedOutputStream out = null;
-FileInputStream fin = new FileInputStream( new File( indexDir, "nexus-maven-repository-index.zip" ) );
+try (InputStream fin = Files.newInputStream( Paths.get( indexDir, "nexus-maven-repository-index.zip" ) ))
{
ZipInputStream in = new ZipInputStream( new BufferedInputStream( fin ) );
ZipEntry entry;
@@ -356,17 +359,19 @@
{
int count;
byte data[] = new byte[buff];
-FileOutputStream fout = new FileOutputStream( new File( destDir, entry.getName() ) );
-out = new BufferedOutputStream( fout, buff );
+try (OutputStream fout = Files.newOutputStream( Paths.get( destDir, entry.getName() ) ))
+{
+try (BufferedOutputStream out = new BufferedOutputStream( fout, buff ))
+{
while ( ( count = in.read( data, 0, buff ) ) != -1 )
{
out.write( data, 0, count );
}
out.flush();
-out.close();
}
}
}
in.close();
}
}
}
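
The rewritten unzip loop above keeps the manual copy buffer; with java.nio.file alone it can be shortened further. A sketch under the same assumptions as the test (a flat index zip with no directory entries), not what the committed test does:

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

// ZipInputStream reports end-of-entry as end-of-stream, so Files.copy
// writes exactly one entry per call; no byte[] buffer or inner loop needed.
static void unzip( String indexDir, String destDir ) throws IOException
{
    Path dest = Files.createDirectories( Paths.get( destDir ) );
    try ( InputStream fin = Files.newInputStream( Paths.get( indexDir, "nexus-maven-repository-index.zip" ) );
          ZipInputStream in = new ZipInputStream( new BufferedInputStream( fin ) ) )
    {
        ZipEntry entry;
        while ( ( entry = in.getNextEntry() ) != null )
        {
            Files.copy( in, dest.resolve( entry.getName() ) ); // fails if the target file already exists
        }
    }
}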