Subject: svn commit: r1239796 - in /incubator/accumulo/branches/1.4: docs/examples/ src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/dirlist/ src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ src/examples/simpl...
From: bil...@apache.org (bil@apache.org)
Date: Feb 2, 2012 11:56:12 am
List: org.apache.incubator.accumulo-commits

Author: billie
Date: Thu Feb 2 19:56:12 2012
New Revision: 1239796

URL: http://svn.apache.org/viewvc?rev=1239796&view=rev
Log:
ACCUMULO-274 added instructions for running filedata example, improved handling of bad data, created histogram example

Added:
    incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/CharacterHistogram.java   (with props)
Modified:
    incubator/accumulo/branches/1.4/docs/examples/README.dirlist
    incubator/accumulo/branches/1.4/docs/examples/README.filedata
    incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/dirlist/Ingest.java
    incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkCombiner.java
    incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormat.java
    incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkInputStream.java
    incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/FileDataIngest.java
    incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/FileDataQuery.java
    incubator/accumulo/branches/1.4/src/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputStreamTest.java

Modified: incubator/accumulo/branches/1.4/docs/examples/README.dirlist
URL: http://svn.apache.org/viewvc/incubator/accumulo/branches/1.4/docs/examples/README.dirlist?rev=1239796&r1=1239795&r2=1239796&view=diff
==============================================================================
--- incubator/accumulo/branches/1.4/docs/examples/README.dirlist (original)
+++ incubator/accumulo/branches/1.4/docs/examples/README.dirlist Thu Feb 2 19:56:12 2012
@@ -24,7 +24,7 @@ This example stores filesystem informati

 This example shows how to use Accumulo to store a file system history. It has the following classes:
 
- * Ingest.java - Recursively lists the files and directories under a given path, ingests their names and file info (not the file data!) into an Accumulo table, and indexes the file names in a separate table.
+ * Ingest.java - Recursively lists the files and directories under a given path, ingests their names and file info into one Accumulo table, indexes the file names in a separate table, and ingests the file data into a third table.
 * QueryUtil.java - Provides utility methods for getting the info for a file, listing the contents of a directory, and performing single wild card searches on file or directory names.
 * Viewer.java - Provides a GUI for browsing the file system information stored in Accumulo.
 * FileCount.java - Computes recursive counts over file system information and stores them back into the same Accumulo table.

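As a rough illustration of what the updated Ingest.java description above means by recursively listing the files and directories under a path, the walk itself (leaving out all the Accumulo writes) can be sketched in plain Java. RecursiveLister and its printout are hypothetical, not code from this commit.

    import java.io.File;

    public class RecursiveLister {
      // Visit every file and directory under a starting path, the way the dirlist
      // Ingest example describes; a real ingester would write a mutation per entry.
      public static void list(File path) {
        System.out.println(path.getAbsolutePath());
        File[] children = path.listFiles();
        if (children == null)
          return; // not a directory, or unreadable
        for (File child : children)
          list(child);
      }

      public static void main(String[] args) {
        list(new File(args[0]));
      }
    }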
Modified: incubator/accumulo/branches/1.4/docs/examples/README.filedata
URL: http://svn.apache.org/viewvc/incubator/accumulo/branches/1.4/docs/examples/README.filedata?rev=1239796&r1=1239795&r2=1239796&view=diff
==============================================================================
--- incubator/accumulo/branches/1.4/docs/examples/README.filedata (original)
+++ incubator/accumulo/branches/1.4/docs/examples/README.filedata Thu Feb 2 19:56:12 2012
@@ -19,12 +19,29 @@ Notice: Licensed to the Apache Softwa
 This example archives file data into an Accumulo table. Files with duplicate data are only stored once. The example has the following classes:
 
+ * CharacterHistogram - A MapReduce that computes a histogram of byte frequency for each file and stores the histogram alongside the file data. An example use of the ChunkInputFormat.
 * ChunkCombiner - An Iterator that dedupes file data and sets their visibilities to a combined visibility based on current references to the file data.
 * ChunkInputFormat - An Accumulo InputFormat that provides keys containing file info (List<Entry<Key,Value>>) and values with an InputStream over the file (ChunkInputStream).
 * ChunkInputStream - An input stream over file data stored in Accumulo.
 * FileDataIngest - Takes a list of files and archives them into Accumulo keyed on the SHA1 hashes of the files.
- * FileDataQuery - Retrieves file data based on the SHA1 hash of the file.
+ * FileDataQuery - Retrieves file data based on the SHA1 hash of the file. (Used by the filedata.Viewer.)
 * KeyUtil - A utility for creating and parsing null-byte separated strings into/from Text objects.
 * VisibilityCombiner - A utility for merging visibilities into the form (VIS1)|(VIS2)|...
 
-This example is coupled with the dirlist example. See README.dirlist for instructions.
\ No newline at end of file
+This example is coupled with the dirlist example. See README.dirlist for instructions.
+
+If you haven't already run the README.dirlist example, ingest a file with FileDataIngest.
+
+    $ ./bin/accumulo org.apache.accumulo.examples.simple.filedata.FileDataIngest instance zookeepers username password dataTable exampleVis 1000 $ACCUMULO_HOME/README
+
+Open the accumulo shell and look at the data. The row is the MD5 hash of the file, which you can verify by running a command such as 'md5sum' on the file.
+
+    > scan -t dataTable
+
+Run the CharacterHistogram MapReduce to add some information about the file.
+
+    $ bin/tool.sh lib/examples-simple*[^c].jar org.apache.accumulo.examples.simple.filedata.CharacterHistogram instance zookeepers username password dataTable exampleVis exampleVis
+
+Scan again to see the histogram stored in the 'info' column family.
+
+    > scan -t dataTable

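To complement the shell scans above, a small Java client can read the histogram back out of the 'info':'hist' column and decode it with the same encoder CharacterHistogram uses to write it. This is only a sketch: the class name and the connection parameters ("instance", "zookeepers", "username", "password", "dataTable", "exampleVis") are placeholders standing in for the arguments shown in the commands above.

    import java.util.List;
    import java.util.Map.Entry;

    import org.apache.accumulo.core.client.Connector;
    import org.apache.accumulo.core.client.Scanner;
    import org.apache.accumulo.core.client.ZooKeeperInstance;
    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Value;
    import org.apache.accumulo.core.iterators.user.SummingArrayCombiner;
    import org.apache.accumulo.core.security.Authorizations;
    import org.apache.hadoop.io.Text;

    public class HistogramScan {
      public static void main(String[] args) throws Exception {
        // Placeholder connection parameters; substitute the same values used above.
        Connector conn = new ZooKeeperInstance("instance", "zookeepers").getConnector("username", "password".getBytes());
        Scanner scanner = conn.createScanner("dataTable", new Authorizations("exampleVis"));
        // Fetch only the histogram column written by CharacterHistogram's HistMapper.
        scanner.fetchColumn(new Text("info"), new Text("hist"));
        for (Entry<Key,Value> entry : scanner) {
          // The value was written with STRING_ARRAY_ENCODER.encode(...), so decode it the same way.
          List<Long> hist = SummingArrayCombiner.STRING_ARRAY_ENCODER.decode(entry.getValue().get());
          System.out.println(entry.getKey().getRow() + " histogram entries: " + hist.size());
        }
      }
    }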
Modified: incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/dirlist/Ingest.java
URL: http://svn.apache.org/viewvc/incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/dirlist/Ingest.java?rev=1239796&r1=1239795&r2=1239796&view=diff
==============================================================================
--- incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/dirlist/Ingest.java (original)
+++ incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/dirlist/Ingest.java Thu Feb 2 19:56:12 2012
@@ -21,12 +21,14 @@ import java.io.IOException;
 
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.iterators.LongCombiner;
 import org.apache.accumulo.core.iterators.TypedValueCombiner.Encoder;
 import org.apache.accumulo.core.security.ColumnVisibility;
+import org.apache.accumulo.examples.simple.filedata.ChunkCombiner;
 import org.apache.accumulo.examples.simple.filedata.FileDataIngest;
 import org.apache.hadoop.io.Text;
 
@@ -131,8 +133,10 @@ public class Ingest {
       conn.tableOperations().create(nameTable);
     if (!conn.tableOperations().exists(indexTable))
       conn.tableOperations().create(indexTable);
-    if (!conn.tableOperations().exists(dataTable))
+    if (!conn.tableOperations().exists(dataTable)) {
       conn.tableOperations().create(dataTable);
+      conn.tableOperations().attachIterator(dataTable, new IteratorSetting(1, ChunkCombiner.class));
+    }
     
     BatchWriter dirBW = conn.createBatchWriter(nameTable, 50000000, 300000l, 4);
     BatchWriter indexBW = conn.createBatchWriter(indexTable, 50000000, 300000l, 4);

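Pulled out of the diff above, the new table setup amounts to attaching ChunkCombiner when the data table is first created. A standalone sketch of that pattern follows; CreateDataTable and the connection strings are placeholders, while the Accumulo calls are the same ones the patched Ingest and FileDataIngest make.

    import org.apache.accumulo.core.client.Connector;
    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.client.ZooKeeperInstance;
    import org.apache.accumulo.examples.simple.filedata.ChunkCombiner;

    public class CreateDataTable {
      public static void main(String[] args) throws Exception {
        // Placeholder connection parameters.
        Connector conn = new ZooKeeperInstance("instance", "zookeepers").getConnector("username", "password".getBytes());
        String dataTable = "dataTable";
        if (!conn.tableOperations().exists(dataTable)) {
          conn.tableOperations().create(dataTable);
          // Same call as in the diff: ChunkCombiner attached at priority 1 so duplicate
          // chunks are deduped and visibilities combined when the table is read or compacted.
          conn.tableOperations().attachIterator(dataTable, new IteratorSetting(1, ChunkCombiner.class));
        }
      }
    }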
Added: incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/CharacterHistogram.java
URL: http://svn.apache.org/viewvc/incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/CharacterHistogram.java?rev=1239796&view=auto
==============================================================================
--- incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/CharacterHistogram.java (added)
+++ incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/CharacterHistogram.java Thu Feb 2 19:56:12 2012
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.examples.simple.filedata;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map.Entry;
+
+import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
+import org.apache.accumulo.core.data.Key;
+import org.apache.accumulo.core.data.Mutation;
+import org.apache.accumulo.core.data.Value;
+import org.apache.accumulo.core.iterators.user.SummingArrayCombiner;
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.accumulo.core.security.ColumnVisibility;
+import org.apache.accumulo.core.util.CachedConfiguration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+
+/**
+ * 
+ */
+public class CharacterHistogram extends Configured implements Tool {
+  public static final String VIS = "vis";
+  
+  public static void main(String[] args) throws Exception {
+    System.exit(ToolRunner.run(CachedConfiguration.getInstance(), new CharacterHistogram(), args));
+  }
+  
+  public static class HistMapper extends Mapper<List<Entry<Key,Value>>,InputStream,Text,Mutation> {
+    private ColumnVisibility cv;
+    
+    public void map(List<Entry<Key,Value>> k, InputStream v, Context context) throws IOException, InterruptedException {
+      Long[] hist = new Long[256];
+      for (int i = 0; i < hist.length; i++)
+        hist[i] = 0l;
+      int b = v.read();
+      while (b >= 0) {
+        hist[b] += 1l;
+        b = v.read();
+      }
+      v.close();
+      Mutation m = new Mutation(k.get(0).getKey().getRow());
+      m.put("info", "hist", cv, new Value(SummingArrayCombiner.STRING_ARRAY_ENCODER.encode(Arrays.asList(hist))));
+      context.write(new Text(), m);
+    }
+    
+    @Override
+    protected void setup(Context context) throws IOException, InterruptedException {
+      cv = new ColumnVisibility(context.getConfiguration().get(VIS, ""));
+    }
+  }
+  
+  @Override
+  public int run(String[] args) throws Exception {
+    Job job = new Job(getConf(), this.getClass().getSimpleName());
+    job.setJarByClass(this.getClass());
+    
+    job.setInputFormatClass(ChunkInputFormat.class);
+    ChunkInputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
+    ChunkInputFormat.setInputInfo(job.getConfiguration(), args[2], args[3].getBytes(), args[4], new Authorizations(args[5].split(",")));
+    job.getConfiguration().set(VIS, args[6]);
+    
+    job.setMapperClass(HistMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(Mutation.class);
+    
+    job.setNumReduceTasks(0);
+    
+    job.setOutputFormatClass(AccumuloOutputFormat.class);
+    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
+    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), false, args[4]);
+    
+    job.waitForCompletion(true);
+    return job.isSuccessful() ? 0 : 1;
+  }
+}

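The value HistMapper writes is simply a 256-entry array of byte counts, encoded with SummingArrayCombiner.STRING_ARRAY_ENCODER. The counting loop can be isolated from MapReduce and run against a local file to see exactly what gets stored; LocalHistogram below is a hypothetical helper, not part of the commit.

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class LocalHistogram {
      // Mirrors the per-byte counting loop HistMapper runs over the ChunkInputStream,
      // applied to a local file so the stored value is easy to reason about.
      public static long[] histogram(InputStream in) throws IOException {
        long[] hist = new long[256];
        int b = in.read();
        while (b >= 0) {
          hist[b]++;
          b = in.read();
        }
        in.close();
        return hist;
      }

      public static void main(String[] args) throws IOException {
        long[] hist = histogram(new FileInputStream(args[0]));
        for (int i = 0; i < hist.length; i++)
          if (hist[i] > 0)
            System.out.println(i + "\t" + hist[i]);
      }
    }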
Propchange: incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/CharacterHistogram.java
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkCombiner.java
URL: http://svn.apache.org/viewvc/incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkCombiner.java?rev=1239796&r1=1239795&r2=1239796&view=diff
==============================================================================
--- incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkCombiner.java (original)
+++ incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkCombiner.java Thu Feb 2 19:56:12 2012
@@ -30,7 +30,7 @@ import org.apache.accumulo.core.iterator
 import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
 import org.apache.hadoop.io.Text;
 
-/*
+/**
  * This iterator dedupes chunks and sets their visibilities to the combined
  * visibility of the refs columns. For example, it would combine
 *

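For a concrete picture of the combined visibility the ChunkCombiner javadoc describes, the (VIS1)|(VIS2)|... form mentioned in README.filedata parses directly as an Accumulo ColumnVisibility. The expression below is a made-up example, not taken from the commit.

    import org.apache.accumulo.core.security.ColumnVisibility;

    public class CombinedVisibilityDemo {
      public static void main(String[] args) {
        // Hypothetical case: two refs columns carry visibilities A&B and C&D, so the
        // deduped chunk ends up readable by anyone who satisfies either expression.
        ColumnVisibility combined = new ColumnVisibility("(A&B)|(C&D)");
        System.out.println(combined);
      }
    }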
Modified: incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormat.java?rev=1239796&r1=1239795&r2=1239796&view=diff
==============================================================================
--- incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormat.java (original)
+++ incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormat.java Thu Feb 2 19:56:12 2012
@@ -31,6 +31,9 @@ import org.apache.hadoop.mapreduce.Input
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
+/**
+ * An InputFormat that turns the file data ingested with FileDataIngest into an InputStream. Mappers used with this InputFormat must close the InputStream.
+ */
 public class ChunkInputFormat extends InputFormatBase<List<Entry<Key,Value>>,InputStream> {
   @Override
   public RecordReader<List<Entry<Key,Value>>,InputStream> createRecordReader(InputSplit split, TaskAttemptContext context) throws IOException,

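The new javadoc's requirement that mappers close the InputStream can be illustrated with a skeleton mapper. FileLengthMapper is hypothetical (not part of this commit); it only counts bytes, and it closes the stream in a finally block so the stream is released even if processing fails part way through.

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.List;
    import java.util.Map.Entry;

    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Value;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    // Skeleton for a mapper fed by ChunkInputFormat: the key holds the file's info
    // entries and the value is an InputStream (a ChunkInputStream) over the file bytes.
    public class FileLengthMapper extends Mapper<List<Entry<Key,Value>>,InputStream,Text,LongWritable> {
      @Override
      public void map(List<Entry<Key,Value>> info, InputStream in, Context context) throws IOException, InterruptedException {
        long length = 0;
        try {
          while (in.read() >= 0)
            length++;
        } finally {
          in.close(); // required by the ChunkInputFormat javadoc added in this commit
        }
        // Emit the file's row (its hash) and the number of bytes read.
        context.write(new Text(info.get(0).getKey().getRow()), new LongWritable(length));
      }
    }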
Modified: incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkInputStream.java
URL: http://svn.apache.org/viewvc/incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkInputStream.java?rev=1239796&r1=1239795&r2=1239796&view=diff
==============================================================================
--- incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkInputStream.java (original)
+++ incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/ChunkInputStream.java Thu Feb 2 19:56:12 2012
@@ -47,13 +47,13 @@ public class ChunkInputStream extends In
     source = null;
   }
   
-  public ChunkInputStream(PeekingIterator<Entry<Key,Value>> in) {
+  public ChunkInputStream(PeekingIterator<Entry<Key,Value>> in) throws IOException {
     setSource(in);
   }
   
-  public void setSource(PeekingIterator<Entry<Key,Value>> in) {
+  public void setSource(PeekingIterator<Entry<Key,Value>> in) throws IOException {
     if (source != null)
-      throw new RuntimeException("setting new source without closing old one");
+      throw new IOException("setting new source without closing old one");
     this.source = in;
     currentVis = new TreeSet<Text>();
     count = pos = 0;
@@ -81,6 +81,12 @@ public class ChunkInputStream extends In
     currentChunk = FileDataIngest.bytesToInt(currentKey.getColumnQualifier().getBytes(), 4);
     currentChunkSize = FileDataIngest.bytesToInt(currentKey.getColumnQualifier().getBytes(), 0);
     gotEndMarker = false;
+    if (buf.length == 0)
+      gotEndMarker = true;
+    if (currentChunk != 0) {
+      source = null;
+      throw new IOException("starting chunk number isn't 0 for " + currentKey.getRow());
+    }
   }
   
   private int fill() throws IOException {
@@ -163,7 +169,7 @@ public class ChunkInputStream extends In
   
   public Set<Text> getVisibilities() {
     if (source != null)
-      throw new RuntimeException("don't get visibilities before chunks have been completely read");
+      throw new IllegalStateException("don't get visibilities before chunks have been completely read");
     return currentVis;
   }

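From the caller's side, the change above means ChunkInputStream's constructor and setSource now surface malformed chunk data as a checked IOException. A sketch of reading one archived file, roughly what FileDataQuery.getData sets up internally, is shown below; ReadOneFile, the method name, and the assumption that the scanner should be restricted to the ~chunk column family are illustrative, not taken from the commit.

    import java.io.IOException;
    import java.util.Map.Entry;

    import org.apache.accumulo.core.client.Scanner;
    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Range;
    import org.apache.accumulo.core.data.Value;
    import org.apache.accumulo.core.util.PeekingIterator;
    import org.apache.accumulo.examples.simple.filedata.ChunkInputStream;
    import org.apache.hadoop.io.Text;

    public class ReadOneFile {
      // Counts the bytes of one archived file; 'scanner' is assumed to be an existing
      // Scanner on the data table and 'hash' the row (file hash) to read.
      public static long countBytes(Scanner scanner, String hash) throws IOException {
        scanner.setRange(new Range(hash));
        scanner.fetchColumnFamily(new Text("~chunk")); // only chunk entries feed the stream
        PeekingIterator<Entry<Key,Value>> pi = new PeekingIterator<Entry<Key,Value>>(scanner.iterator());
        // After this commit, malformed data (e.g. a first chunk number that isn't 0)
        // throws IOException here instead of RuntimeException.
        ChunkInputStream cis = new ChunkInputStream(pi);
        long length = 0;
        try {
          while (cis.read() >= 0)
            length++;
        } finally {
          cis.close();
        }
        return length;
      }
    }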
Modified: incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/FileDataIngest.java
URL: http://svn.apache.org/viewvc/incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/FileDataIngest.java?rev=1239796&r1=1239795&r2=1239796&view=diff
==============================================================================
--- incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/FileDataIngest.java (original)
+++ incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/FileDataIngest.java Thu Feb 2 19:56:12 2012
@@ -24,6 +24,7 @@ import java.security.NoSuchAlgorithmExce
 
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.MutationsRejectedException;
 import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.data.ArrayByteSequence;
@@ -165,8 +166,10 @@ public class FileDataIngest {
     int chunkSize = Integer.parseInt(args[6]);
     
     Connector conn = new ZooKeeperInstance(instance, zooKeepers).getConnector(user, pass.getBytes());
-    if (!conn.tableOperations().exists(dataTable))
+    if (!conn.tableOperations().exists(dataTable)) {
       conn.tableOperations().create(dataTable);
+      conn.tableOperations().attachIterator(dataTable, new IteratorSetting(1, ChunkCombiner.class));
+    }
     BatchWriter bw = conn.createBatchWriter(dataTable, 50000000, 300000l, 4);
     FileDataIngest fdi = new FileDataIngest(chunkSize, colvis);
     for (int i = 7; i < args.length; i++) {

Modified: incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/FileDataQuery.java
URL: http://svn.apache.org/viewvc/incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/FileDataQuery.java?rev=1239796&r1=1239795&r2=1239796&view=diff
==============================================================================
--- incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/FileDataQuery.java (original)
+++ incubator/accumulo/branches/1.4/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/filedata/FileDataQuery.java Thu Feb 2 19:56:12 2012
@@ -52,7 +52,7 @@ public class FileDataQuery {
     return lastRefs;
   }
   
-  public ChunkInputStream getData(String hash) {
+  public ChunkInputStream getData(String hash) throws IOException {
     scanner.setRange(new Range(hash));
     scanner.setBatchSize(1);
     lastRefs.clear();

Modified: incubator/accumulo/branches/1.4/src/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputStreamTest.java
URL: http://svn.apache.org/viewvc/incubator/accumulo/branches/1.4/src/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputStreamTest.java?rev=1239796&r1=1239795&r2=1239796&view=diff
==============================================================================
--- incubator/accumulo/branches/1.4/src/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputStreamTest.java (original)
+++ incubator/accumulo/branches/1.4/src/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputStreamTest.java Thu Feb 2 19:56:12 2012
@@ -38,8 +38,6 @@ import org.apache.accumulo.core.data.Val
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.security.ColumnVisibility;
 import org.apache.accumulo.core.util.PeekingIterator;
-import org.apache.accumulo.examples.simple.filedata.ChunkInputStream;
-import org.apache.accumulo.examples.simple.filedata.FileDataIngest;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Logger;
 
@@ -65,6 +63,9 @@ public class ChunkInputStreamTest extend
     addData(data, "c", "~chunk", 100, 0, "A&B", "asdfjkl;");
     addData(data, "c", "~chunk", 100, 1, "A&B", "asdfjkl;");
     addData(data, "c", "~chunk", 100, 2, "A&B", "");
+    addData(data, "d", "~chunk", 100, 0, "A&B", "");
+    addData(data, "e", "~chunk", 100, 0, "A&B", "asdfjkl;");
+    addData(data, "e", "~chunk", 100, 1, "A&B", "");
     baddata = new ArrayList<Entry<Key,Value>>();
     addData(baddata, "a", "~chunk", 100, 0, "A", "asdfjkl;");
     addData(baddata, "b", "~chunk", 100, 0, "B", "asdfjkl;");
@@ -77,6 +78,8 @@ public class ChunkInputStreamTest extend
     addData(baddata, "e", "~chunk", 100, 0, "I", "asdfjkl;");
     addData(baddata, "e", "~chunk", 100, 1, "J", "");
     addData(baddata, "e", "~chunk", 100, 2, "I", "asdfjkl;");
+    addData(baddata, "f", "~chunk", 100, 2, "K", "asdfjkl;");
+    addData(baddata, "g", "~chunk", 100, 0, "L", "");
     multidata = new ArrayList<Entry<Key,Value>>();
     addData(multidata, "a", "~chunk", 100, 0, "A&B", "asdfjkl;");
     addData(multidata, "a", "~chunk", 100, 1, "A&B", "");
@@ -106,7 +109,7 @@ public class ChunkInputStreamTest extend
     try {
       cis.setSource(pi);
       assertNotNull(null);
-    } catch (RuntimeException e) {
+    } catch (IOException e) {
       assertNull(null);
     }
     cis.close();
@@ -167,6 +170,14 @@ public class ChunkInputStreamTest extend
     assertEquals(read = cis.read(b), -1);
     cis.close();
     
+    cis.setSource(pi);
+    assertEquals(read = cis.read(b), 5);
+    assertEquals(new String(b, 0, read), "asdfj");
+    assertEquals(read = cis.read(b), 3);
+    assertEquals(new String(b, 0, read), "kl;");
+    assertEquals(read = cis.read(b), -1);
+    cis.close();
+    
     assertFalse(pi.hasNext());
   }
   
@@ -199,6 +210,12 @@ public class ChunkInputStreamTest extend
     assertEquals(read = cis.read(b), -1);
     cis.close();
     
+    cis.setSource(pi);
+    assertEquals(read = cis.read(b), 8);
+    assertEquals(new String(b, 0, read), "asdfjkl;");
+    assertEquals(read = cis.read(b), -1);
+    cis.close();
+    
     assertFalse(pi.hasNext());
   }
   
@@ -245,6 +262,12 @@ public class ChunkInputStreamTest extend
     assertEquals(read = cis.read(b), -1);
     cis.close();
     
+    cis.setSource(pi);
+    assertEquals(read = cis.read(b), 8);
+    assertEquals(new String(b, 0, read), "asdfjkl;");
+    assertEquals(read = cis.read(b), -1);
+    cis.close();
+    
     assertFalse(pi.hasNext());
   }
   
@@ -302,6 +325,20 @@ public class ChunkInputStreamTest extend
     cis.close();
     assertEquals(cis.getVisibilities().toString(), "[I, J]");
     
+    try {
+      cis.setSource(pi);
+      assertNotNull(null);
+    } catch (IOException e) {
+      assertNull(null);
+    }
+    assumeExceptionOnClose(cis);
+    assertEquals(cis.getVisibilities().toString(), "[K]");
+    
+    cis.setSource(pi);
+    assertEquals(read = cis.read(b), -1);
+    assertEquals(cis.getVisibilities().toString(), "[L]");
+    cis.close();
+    
     assertFalse(pi.hasNext());
     
     pi = new PeekingIterator<Entry<Key,Value>>(baddata.iterator());
@@ -339,6 +376,19 @@ public class ChunkInputStreamTest extend
     assumeExceptionOnRead(cis, b);
     assertEquals(cis.getVisibilities().toString(), "[I, J]");
     
+    try {
+      cis.setSource(pi);
+      assertNotNull(null);
+    } catch (IOException e) {
+      assertNull(null);
+    }
+    assertEquals(cis.getVisibilities().toString(), "[K]");
+    
+    cis.setSource(pi);
+    assertEquals(read = cis.read(b), -1);
+    assertEquals(cis.getVisibilities().toString(), "[L]");
+    cis.close();
+    
     assertFalse(pi.hasNext());
     
     pi = new PeekingIterator<Entry<Key,Value>>(baddata.iterator());