1 change: 0 additions & 1 deletion .github/ISSUE_TEMPLATE/feature_request.yaml
@@ -40,7 +40,6 @@ body:
         - Build
         - Arrow
         - Avro
-        - Pig
         - Protobuf
         - Thrift
         - CLI
1 change: 0 additions & 1 deletion README.md
@@ -72,7 +72,6 @@ Parquet is an active project, and new features are being added quickly. Here are
 
 * Type-specific encoding
 * Hive integration (deprecated)
-* Pig integration (deprecated)
 * Cascading integration (deprecated)
 * Crunch integration
 * Apache Arrow integration
26 changes: 1 addition & 25 deletions parquet-thrift/pom.xml
@@ -70,7 +70,7 @@
       <scope>provided</scope>
     </dependency>
     <!-- Guava is a dependency of hadoop-common, but scoped to compile. We need to
-    explicity declare it as a test dependency. -->
+    explicitly declare it as a test dependency. -->
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
@@ -89,18 +89,6 @@
         </exclusion>
       </exclusions>
     </dependency>
-    <dependency>
-      <groupId>com.twitter.elephantbird</groupId>
-      <artifactId>elephant-bird-pig</artifactId>
-      <version>${elephant-bird.version}</version>
-      <exclusions>
-        <!-- hadoop-lzo is not required for parquet build/tests and there are issues downloading it -->
-        <exclusion>
-          <groupId>com.hadoop.gplcompression</groupId>
-          <artifactId>hadoop-lzo</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
     <dependency>
       <groupId>org.apache.parquet</groupId>
       <artifactId>parquet-jackson</artifactId>
@@ -124,18 +112,6 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.parquet</groupId>
-      <artifactId>parquet-pig</artifactId>
-      <version>1.15.0</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.pig</groupId>
-      <artifactId>pig</artifactId>
-      <version>${pig.version}</version>
-      <classifier>${pig.classifier}</classifier>
-      <scope>provided</scope>
-    </dependency>
     <dependency>
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
19 changes: 0 additions & 19 deletions parquet-thrift/src/main/java/org/apache/parquet/hadoop/thrift/AbstractThriftWriteSupport.java
@@ -15,7 +15,6 @@
  */
 package org.apache.parquet.hadoop.thrift;
 
-import com.twitter.elephantbird.pig.util.ThriftToPig;
 import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.parquet.conf.HadoopParquetConfiguration;
@@ -25,13 +24,11 @@
 import org.apache.parquet.io.ColumnIOFactory;
 import org.apache.parquet.io.MessageColumnIO;
 import org.apache.parquet.io.api.RecordConsumer;
-import org.apache.parquet.pig.PigMetaData;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.thrift.ParquetWriteProtocol;
 import org.apache.parquet.thrift.ThriftMetaData;
 import org.apache.parquet.thrift.ThriftSchemaConverter;
 import org.apache.parquet.thrift.struct.ThriftType.StructType;
-import org.apache.thrift.TBase;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -99,26 +96,10 @@ protected void init(Class<T> thriftClass) {
 
     final Map<String, String> extraMetaData =
         new ThriftMetaData(thriftClass.getName(), thriftStruct).toExtraMetaData();
-    // adding the Pig schema as it would have been mapped from thrift
-    // TODO: make this work for non-tbase types
-    if (isPigLoaded() && TBase.class.isAssignableFrom(thriftClass)) {
-      new PigMetaData(new ThriftToPig((Class<? extends TBase<?, ?>>) thriftClass).toSchema())
-          .addToMetaData(extraMetaData);
-    }
 
     this.writeContext = new WriteContext(schema, extraMetaData);
   }
 
-  protected boolean isPigLoaded() {
-    try {
-      Class.forName("org.apache.pig.impl.logicalLayer.schema.Schema");
-      return true;
-    } catch (ClassNotFoundException e) {
-      LOG.info("Pig is not loaded, pig metadata will not be written");
-      return false;
-    }
-  }
-
   @Override
   public WriteContext init(Configuration configuration) {
     return init(new HadoopParquetConfiguration(configuration));
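For context, the deleted isPigLoaded() method is a standard optional-dependency probe: it loads a marker class reflectively so that code referencing the optional library only runs when that library is actually on the classpath, which is what let the Pig dependency stay at provided scope. A minimal sketch of the pattern under stated assumptions — OptionalDependency and the marker-class parameter are illustrative names, not Parquet APIs:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical helper illustrating the probe the deleted isPigLoaded() performed.
public final class OptionalDependency {
  private static final Logger LOG = LoggerFactory.getLogger(OptionalDependency.class);

  private OptionalDependency() {}

  // Returns true only if the marker class can be loaded; callers guard any code
  // that mentions the optional library's types behind this check, so the JVM
  // never tries to link against classes that may be absent at runtime.
  public static boolean isPresent(String markerClassName) {
    try {
      Class.forName(markerClassName);
      return true;
    } catch (ClassNotFoundException | LinkageError e) {
      LOG.info("{} not found on the classpath; skipping optional integration", markerClassName);
      return false;
    }
  }
}

The deleted code applied exactly this probe to org.apache.pig.impl.logicalLayer.schema.Schema before attaching Pig metadata; with the integration removed, the probe and the Pig entries in parquet-thrift/pom.xml above go away together.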

This file was deleted.

This file was deleted.

1 change: 1 addition & 0 deletions parquet-thrift/src/main/java/org/apache/parquet/thrift/struct/ThriftType.java
@@ -142,6 +142,7 @@ default R visit(UUIDType uuidType, S state) {
   /**
    * @deprecated will be removed in 2.0.0; use StateVisitor instead.
    */
+  @Deprecated
   public interface TypeVisitor {
 
     void visit(MapType mapType);
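The final hunk pairs the existing @deprecated Javadoc tag with the @Deprecated annotation. The annotation is what makes javac and IDEs warn at call sites; the Javadoc tag only documents the intent, so idiomatic deprecations carry both. A minimal sketch of the convention — the visitor names below are illustrative stand-ins, not Parquet's:

public class DeprecationSketch {

  /**
   * @deprecated will be removed in a future major release; use {@link NewVisitor} instead.
   */
  @Deprecated // without the annotation, the compiler would not flag usages
  public interface OldVisitor {
    void visit(String value);
  }

  /** Replacement that threads caller state through each visit call. */
  public interface NewVisitor {
    <R, S> R visit(String value, S state);
  }
}

On Java 9 and later, the annotation can also carry since and forRemoval attributes, e.g. @Deprecated(since = "1.16", forRemoval = true) with illustrative values, which turns the removal plan into machine-readable metadata.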