@@ -24,7 +24,6 @@
 import java.util.Map;

 import org.apache.spark.unsafe.Platform;
-import org.apache.spark.unsafe.memory.MemoryAllocator;

 /**
  * A simple {@link MemoryAllocator} that can allocate up to 16GB using a JVM long primitive array.
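For context on the "up to 16GB" figure in the javadoc above: a JVM array is indexed by an int, so a long[] can hold at most about Integer.MAX_VALUE elements of 8 bytes each, which is roughly 16GB. Below is a minimal, self-contained sketch of a long[]-backed allocation with that bound; the names SimpleHeapBuffer and allocate are illustrative only and are not Spark APIs.

// Hypothetical sketch: a long[]-backed heap allocation capped at ~16GB.
public final class SimpleHeapBuffer {
  // A long[] is indexed by int, so at most ~2^31 - 1 elements of 8 bytes each (~16GB).
  private static final long MAX_HEAP_ALLOCATION_BYTES = (long) Integer.MAX_VALUE * 8L;

  private final long[] data;
  private final long sizeInBytes;

  private SimpleHeapBuffer(long[] data, long sizeInBytes) {
    this.data = data;
    this.sizeInBytes = sizeInBytes;
  }

  public static SimpleHeapBuffer allocate(long sizeInBytes) {
    if (sizeInBytes <= 0 || sizeInBytes > MAX_HEAP_ALLOCATION_BYTES) {
      throw new IllegalArgumentException("Unsupported allocation size: " + sizeInBytes);
    }
    // Round up to a whole number of 8-byte words; very large requests may still
    // fail at runtime with an OutOfMemoryError near the VM's array-size limit.
    long[] backing = new long[(int) ((sizeInBytes + 7) / 8)];
    return new SimpleHeapBuffer(backing, sizeInBytes);
  }

  public long size() {
    return sizeInBytes;
  }
}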
@@ -23,12 +23,12 @@ public interface MemoryAllocator {
    * Whether to fill newly allocated and deallocated memory with 0xa5 and 0x5a bytes respectively.
    * This helps catch misuse of uninitialized or freed memory, but imposes some overhead.
    */
-  public static final boolean MEMORY_DEBUG_FILL_ENABLED = Boolean.parseBoolean(
+  boolean MEMORY_DEBUG_FILL_ENABLED = Boolean.parseBoolean(
     System.getProperty("spark.memory.debugFill", "false"));

   // Same as jemalloc's debug fill values.
-  public static final byte MEMORY_DEBUG_FILL_CLEAN_VALUE = (byte)0xa5;
-  public static final byte MEMORY_DEBUG_FILL_FREED_VALUE = (byte)0x5a;
+  byte MEMORY_DEBUG_FILL_CLEAN_VALUE = (byte)0xa5;
+  byte MEMORY_DEBUG_FILL_FREED_VALUE = (byte)0x5a;

   /**
    * Allocates a contiguous block of memory. Note that the allocated memory is not guaranteed
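The modifier removals above are purely cosmetic: every field declared in a Java interface is implicitly public, static, and final, so spelling those keywords out is redundant. A small, self-contained illustration follows; the Limits interface and LimitsDemo class are hypothetical and only mirror the MEMORY_DEBUG_FILL_ENABLED pattern, they are not part of Spark.

// Hypothetical example: interface fields are implicitly public static final.
interface Limits {
  // No modifiers written, yet this behaves exactly like a public static final constant,
  // and may still be initialized from a system property.
  boolean DEBUG_FILL_ENABLED = Boolean.parseBoolean(
      System.getProperty("example.debugFill", "false"));
  byte FILL_VALUE = (byte) 0xa5;
}

public class LimitsDemo {
  public static void main(String[] args) {
    // Read like any other constant; no instance of Limits is needed.
    System.out.println(Limits.DEBUG_FILL_ENABLED + " " + Limits.FILL_VALUE);
    // Limits.FILL_VALUE = 0; // would not compile: the field is implicitly final
  }
}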
@@ -780,8 +780,7 @@ private[sql] object ParquetFileFormat extends Logging {
     val assumeBinaryIsString = sparkSession.sessionState.conf.isParquetBinaryAsString
     val assumeInt96IsTimestamp = sparkSession.sessionState.conf.isParquetINT96AsTimestamp
     val writeLegacyParquetFormat = sparkSession.sessionState.conf.writeLegacyParquetFormat
-    val serializedConf =
-      new SerializableConfiguration(sparkSession.sessionState.newHadoopConf())
+    val serializedConf = new SerializableConfiguration(sparkSession.sessionState.newHadoopConf())

     // !! HACK ALERT !!
     //
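The joined line above wraps a fresh Hadoop Configuration in SerializableConfiguration so it can be serialized and shipped with tasks: Hadoop's Configuration does not implement java.io.Serializable, but it is a Writable. Below is a rough sketch of how such a wrapper can be built; the ConfWrapper class is hypothetical and is not Spark's actual SerializableConfiguration.

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

import org.apache.hadoop.conf.Configuration;

// Hypothetical sketch of a serializable wrapper around Hadoop's Configuration.
public class ConfWrapper implements Serializable {
  // Configuration itself is not Serializable, so keep it transient and
  // hand-roll its (de)serialization via the Writable interface it implements.
  private transient Configuration conf;

  public ConfWrapper(Configuration conf) {
    this.conf = conf;
  }

  public Configuration get() {
    return conf;
  }

  private void writeObject(ObjectOutputStream out) throws IOException {
    out.defaultWriteObject();
    conf.write(out); // Writable.write(DataOutput)
  }

  private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    conf = new Configuration(false);
    conf.readFields(in); // Writable.readFields(DataInput)
  }
}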
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.sources

 import org.apache.spark.sql._
-import org.apache.spark.sql.internal.SQLConf

 private[sql] abstract class DataSourceTest extends QueryTest {
