Commit 0a1e8d16 authored by Sam Tunnicliffe

Merge branch 'cassandra-2.2' into cassandra-3.0

parents c092c461 c8c3c269
......@@ -14,6 +14,7 @@
* Memtable memory allocations may deadlock (CASSANDRA-15367)
* Run evictFromMembership in GossipStage (CASSANDRA-15592)
Merged from 2.2:
* Fix nomenclature of allow and deny lists (CASSANDRA-15862)
* Remove generated files from source artifact (CASSANDRA-15849)
* Remove duplicated tools binaries from tarballs (CASSANDRA-15768)
* Duplicate results with DISTINCT queries in mixed mode (CASSANDRA-15501)
......
......@@ -76,19 +76,19 @@ public abstract class UDFunction extends AbstractFunction implements ScalarFunct
protected final boolean calledOnNullInput;
//
// Access to classes is controlled via a whitelist and a blacklist.
// Access to classes is controlled via allow and disallow lists.
//
// When a class is requested (both during compilation and runtime),
// the whitelistedPatterns array is searched first, whether the
// the allowedPatterns array is searched first, whether the
// requested name matches one of the patterns. If not, nothing is
// returned from the class-loader - meaning ClassNotFoundException
// during runtime and "type could not resolved" during compilation.
//
// If a whitelisted pattern has been found, the blacklistedPatterns
// If an allowed pattern has been found, the disallowedPatterns
// array is searched for a match. If a match is found, class-loader
// rejects access. Otherwise the class/resource can be loaded.
//
private static final String[] whitelistedPatterns =
private static final String[] allowedPatterns =
{
"com/datastax/driver/core/",
"com/google/common/reflect/TypeToken",
......@@ -110,8 +110,8 @@ public abstract class UDFunction extends AbstractFunction implements ScalarFunct
"org/apache/cassandra/cql3/functions/JavaUDF.class",
"org/apache/cassandra/exceptions/",
};
// Only need to blacklist a pattern, if it would otherwise be allowed via whitelistedPatterns
private static final String[] blacklistedPatterns =
// Only need to disallow a pattern if it would otherwise be allowed via allowedPatterns
private static final String[] disallowedPatterns =
{
"com/datastax/driver/core/Cluster.class",
"com/datastax/driver/core/Metrics.class",
......@@ -154,13 +154,13 @@ public abstract class UDFunction extends AbstractFunction implements ScalarFunct
while (resource.startsWith("/"))
resource = resource.substring(1);
for (String white : whitelistedPatterns)
if (resource.startsWith(white))
for (String allowed : allowedPatterns)
if (resource.startsWith(allowed))
{
// resource is in whitelistedPatterns, let's see if it is not explicityl blacklisted
for (String black : blacklistedPatterns)
if (resource.startsWith(black))
// resource is in allowedPatterns, let's see if it is not explicitly disallowed
for (String disallowed : disallowedPatterns)
if (resource.startsWith(disallowed))
{
logger.trace("access denied: resource {}", resource);
return false;
......
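The comments and check loop above describe a two-list filter: a requested class or resource must match an allowedPatterns prefix and must not match a disallowedPatterns prefix, otherwise the class-loader returns nothing. A minimal sketch of that check, with abbreviated pattern lists standing in for the full UDFunction arrays:

```java
import java.util.List;

// Sketch of the prefix-based allow/disallow check described in the diff above.
// The pattern lists are abbreviated stand-ins, not the real UDFunction arrays.
final class ResourceAccessFilterSketch
{
    private static final List<String> ALLOWED_PATTERNS =
        List.of("com/datastax/driver/core/", "org/apache/cassandra/exceptions/");
    private static final List<String> DISALLOWED_PATTERNS =
        List.of("com/datastax/driver/core/Cluster.class");

    static boolean secureResource(String resource)
    {
        // normalize leading slashes, as the real method does
        while (resource.startsWith("/"))
            resource = resource.substring(1);

        for (String allowed : ALLOWED_PATTERNS)
        {
            if (!resource.startsWith(allowed))
                continue;
            // allowed prefix found; reject if a disallowed prefix also matches
            for (String disallowed : DISALLOWED_PATTERNS)
                if (resource.startsWith(disallowed))
                    return false;
            return true;
        }
        // no allowed prefix matched: access is denied
        return false;
    }
}
```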
......@@ -310,10 +310,10 @@ public class Directories
/**
* Basically the same as calling {@link #getWriteableLocationAsFile(long)} with an unknown size ({@code -1L}),
* which may return any non-blacklisted directory - even a data directory that has no usable space.
* which may return any allowed directory - even a data directory that has no usable space.
* Do not use this method in production code.
*
* @throws IOError if all directories are blacklisted.
* @throws IOError if all directories are disallowed.
*/
public File getDirectoryForNewSSTables()
{
......@@ -321,9 +321,9 @@ public class Directories
}
/**
* Returns a non-blacklisted data directory that _currently_ has {@code writeSize} bytes as usable space.
* Returns an allowed data directory that _currently_ has {@code writeSize} bytes as usable space.
*
* @throws IOError if all directories are blacklisted.
* @throws IOError if all directories are disallowed.
*/
public File getWriteableLocationAsFile(long writeSize)
{
......@@ -331,11 +331,11 @@ public class Directories
}
/**
* Returns a temporary subdirectory on non-blacklisted data directory
* Returns a temporary subdirectory on an allowed data directory
* that _currently_ has {@code writeSize} bytes as usable space.
* This method does not create the temporary directory.
*
* @throws IOError if all directories are blacklisted.
* @throws IOError if all directories are disallowed.
*/
public File getTemporaryWriteableDirectoryAsFile(long writeSize)
{
......@@ -359,9 +359,9 @@ public class Directories
}
/**
* Returns a non-blacklisted data directory that _currently_ has {@code writeSize} bytes as usable space.
* Returns an allowed data directory that _currently_ has {@code writeSize} bytes as usable space.
*
* @throws IOError if all directories are blacklisted.
* @throws IOError if all directories are disallowed.
*/
public DataDirectory getWriteableLocation(long writeSize)
{
......@@ -369,13 +369,13 @@ public class Directories
long totalAvailable = 0L;
// pick directories with enough space and so that resulting sstable dirs aren't blacklisted for writes.
// pick directories with enough space, making sure the resulting sstable dirs aren't disallowed for writes.
boolean tooBig = false;
for (DataDirectory dataDir : paths)
{
if (BlacklistedDirectories.isUnwritable(getLocationForDisk(dataDir)))
if (DisallowedDirectories.isUnwritable(getLocationForDisk(dataDir)))
{
logger.trace("removing blacklisted candidate {}", dataDir.location);
logger.trace("removing disallowed candidate {}", dataDir.location);
continue;
}
DataDirectoryCandidate candidate = new DataDirectoryCandidate(dataDir);
......@@ -394,7 +394,7 @@ public class Directories
if (tooBig)
throw new FSDiskFullWriteError(new IOException("Insufficient disk space to write " + writeSize + " bytes"), "");
else
throw new FSWriteError(new IOException("All configured data directories have been blacklisted as unwritable for erroring out"), "");
throw new FSWriteError(new IOException("All configured data directories have been disallowed as unwritable for erroring out"), "");
// shortcut for single data directory systems
if (candidates.size() == 1)
......@@ -439,7 +439,7 @@ public class Directories
for (DataDirectory dataDir : paths)
{
if (BlacklistedDirectories.isUnwritable(getLocationForDisk(dataDir)))
if (DisallowedDirectories.isUnwritable(getLocationForDisk(dataDir)))
continue;
DataDirectoryCandidate candidate = new DataDirectoryCandidate(dataDir);
// exclude directory if its total writeSize does not fit to data directory
......@@ -691,7 +691,7 @@ public class Directories
for (File location : dataPaths)
{
if (BlacklistedDirectories.isUnreadable(location))
if (DisallowedDirectories.isUnreadable(location))
continue;
if (snapshotName != null)
......
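The Directories hunks above keep the same selection logic under the new names: skip any data directory that DisallowedDirectories has marked unwritable, note whether the remaining ones are merely too small, and fail with a disk-full or "all disallowed" error when nothing is left. A simplified sketch of that loop, using plain java.io.File and a predicate in place of the real DataDirectory and DisallowedDirectories types:

```java
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

// Illustrative only: stand-ins for DataDirectory and DisallowedDirectories.isUnwritable.
final class WriteLocationPickerSketch
{
    static File pickWriteableLocation(List<File> dataDirs,
                                      long writeSize,
                                      Predicate<File> isUnwritable) throws IOException
    {
        List<File> candidates = new ArrayList<>();
        boolean tooBig = false;

        for (File dir : dataDirs)
        {
            if (isUnwritable.test(dir))
                continue;                  // skip disallowed (formerly "blacklisted") directories
            if (dir.getUsableSpace() < writeSize)
            {
                tooBig = true;             // writable, but not enough room for this write
                continue;
            }
            candidates.add(dir);
        }

        if (candidates.isEmpty())
            throw tooBig
                ? new IOException("Insufficient disk space to write " + writeSize + " bytes")
                : new IOException("All configured data directories have been disallowed as unwritable");

        return candidates.get(0);          // the real code weights candidates by free space
    }
}
```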
......@@ -27,21 +27,22 @@ import java.util.concurrent.CopyOnWriteArraySet;
import com.google.common.annotations.VisibleForTesting;
import org.apache.cassandra.utils.JVMStabilityInspector;
import org.apache.cassandra.utils.MBeanWrapper;
public class BlacklistedDirectories implements BlacklistedDirectoriesMBean
public class DisallowedDirectories implements DisallowedDirectoriesMBean
{
public static final String MBEAN_NAME = "org.apache.cassandra.db:type=BlacklistedDirectories";
private static final Logger logger = LoggerFactory.getLogger(BlacklistedDirectories.class);
private static final BlacklistedDirectories instance = new BlacklistedDirectories();
public static final String DEPRECATED_MBEAN_NAME = "org.apache.cassandra.db:type=BlacklistedDirectories";
public static final String MBEAN_NAME = "org.apache.cassandra.db:type=DisallowedDirectories";
private static final Logger logger = LoggerFactory.getLogger(DisallowedDirectories.class);
private static final DisallowedDirectories instance = new DisallowedDirectories();
private final Set<File> unreadableDirectories = new CopyOnWriteArraySet<File>();
private final Set<File> unwritableDirectories = new CopyOnWriteArraySet<File>();
private BlacklistedDirectories()
private DisallowedDirectories()
{
// Register this instance with JMX
MBeanWrapper.instance.registerMBean(this, DEPRECATED_MBEAN_NAME, MBeanWrapper.OnException.LOG);
MBeanWrapper.instance.registerMBean(this, MBEAN_NAME, MBeanWrapper.OnException.LOG);
}
......@@ -59,14 +60,14 @@ public class BlacklistedDirectories implements BlacklistedDirectoriesMBean
* Adds parent directory of the file (or the file itself, if it is a directory)
* to the set of unreadable directories.
*
* @return the blacklisted directory or null if nothing has been added to the list.
* @return the disallowed directory or null if nothing has been added to the list.
*/
public static File maybeMarkUnreadable(File path)
{
File directory = getDirectory(path);
if (instance.unreadableDirectories.add(directory))
{
logger.warn("Blacklisting {} for reads", directory);
logger.warn("Disallowing {} for reads", directory);
return directory;
}
return null;
......@@ -76,14 +77,14 @@ public class BlacklistedDirectories implements BlacklistedDirectoriesMBean
* Adds parent directory of the file (or the file itself, if it is a directory)
* to the set of unwritable directories.
*
* @return the blacklisted directory or null if nothing has been added to the list.
* @return the disallowed directory or null if nothing has been added to the list.
*/
public static File maybeMarkUnwritable(File path)
{
File directory = getDirectory(path);
if (instance.unwritableDirectories.add(directory))
{
logger.warn("Blacklisting {} for writes", directory);
logger.warn("Disallowing {} for writes", directory);
return directory;
}
return null;
......@@ -101,8 +102,8 @@ public class BlacklistedDirectories implements BlacklistedDirectoriesMBean
/**
* Tells whether or not the directory is blacklisted for reads.
* @return whether or not the directory is blacklisted for reads.
* Tells whether or not the directory is disallowed for reads.
* @return whether or not the directory is disallowed for reads.
*/
public static boolean isUnreadable(File directory)
{
......@@ -110,8 +111,8 @@ public class BlacklistedDirectories implements BlacklistedDirectoriesMBean
}
/**
* Tells whether or not the directory is blacklisted for writes.
* @return whether or not the directory is blacklisted for reads.
* Tells whether or not the directory is disallowed for writes.
* @return whether or not the directory is disallowed for writes.
*/
public static boolean isUnwritable(File directory)
{
......
......@@ -20,7 +20,8 @@ package org.apache.cassandra.db;
import java.io.File;
import java.util.Set;
public interface BlacklistedDirectoriesMBean {
public interface DisallowedDirectoriesMBean
{
public Set<File> getUnreadableDirectories();
......
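DisallowedDirectories now registers under both the deprecated BlacklistedDirectories object name and the new one, so existing JMX tooling keeps working through the rename. A sketch of reaching the bean through either name, assuming it runs in (or is connected to) the Cassandra JVM where the dual registration above has happened:

```java
import java.lang.management.ManagementFactory;
import javax.management.JMX;
import javax.management.MBeanServer;
import javax.management.ObjectName;

import org.apache.cassandra.db.DisallowedDirectoriesMBean;

// Sketch: the deprecated and the new object name resolve to the same MBean instance.
final class DisallowedDirectoriesJmxSketch
{
    public static void main(String[] args) throws Exception
    {
        MBeanServer server = ManagementFactory.getPlatformMBeanServer();

        ObjectName newName = new ObjectName("org.apache.cassandra.db:type=DisallowedDirectories");
        ObjectName oldName = new ObjectName("org.apache.cassandra.db:type=BlacklistedDirectories");

        DisallowedDirectoriesMBean viaNew = JMX.newMBeanProxy(server, newName, DisallowedDirectoriesMBean.class);
        DisallowedDirectoriesMBean viaOld = JMX.newMBeanProxy(server, oldName, DisallowedDirectoriesMBean.class);

        // Both proxies report the same unreadable-directory set.
        System.out.println(viaNew.getUnreadableDirectories());
        System.out.println(viaOld.getUnreadableDirectories());
    }
}
```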
......@@ -247,10 +247,10 @@ public abstract class AbstractCompactionStrategy
}
/**
* Filters SSTables that are to be blacklisted from the given collection
* Filters SSTables that are to be excluded from the given collection
*
* @param originalCandidates The collection to check for blacklisted SSTables
* @return list of the SSTables with blacklisted ones filtered out
* @param originalCandidates The collection to check for excluded SSTables
* @return list of the SSTables with excluded ones filtered out
*/
public static List<SSTableReader> filterSuspectSSTables(Iterable<SSTableReader> originalCandidates)
{
......
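filterSuspectSSTables, whose javadoc is reworded above, drops candidates that were marked suspect after a prior failure (see markSuspect further down in this diff). A stream-based sketch of the same filter over a minimal stand-in reader interface:

```java
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

final class SuspectFilterSketch
{
    // Minimal stand-in for SSTableReader's suspect flag.
    interface Reader { boolean isMarkedSuspect(); }

    // Returns the candidates with suspect (previously "blacklisted") readers removed.
    static List<Reader> filterSuspect(Iterable<Reader> originalCandidates)
    {
        return StreamSupport.stream(originalCandidates.spliterator(), false)
                            .filter(r -> !r.isMarkedSuspect())
                            .collect(Collectors.toList());
    }
}
```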
......@@ -576,7 +576,7 @@ public class LeveledManifest
/**
* @return highest-priority sstables to compact for the given level.
* If no compactions are possible (because of concurrent compactions or because some sstables are blacklisted
* If no compactions are possible (because of concurrent compactions or because some sstables are excluded
* for prior failure), will return an empty list. Never returns null.
*/
private Collection<SSTableReader> getCandidatesFor(int level)
......
......@@ -242,7 +242,7 @@ final class HintsDispatchExecutor
{
logger.error("Failed to dispatch hints file {}: file is corrupted ({})", descriptor.fileName(), e);
store.cleanUp(descriptor);
store.blacklist(descriptor);
store.markCorrupted(descriptor);
throw e;
}
}
......
......@@ -52,7 +52,7 @@ final class HintsStore
private final Map<HintsDescriptor, InputPosition> dispatchPositions;
private final Deque<HintsDescriptor> dispatchDequeue;
private final Queue<HintsDescriptor> blacklistedFiles;
private final Queue<HintsDescriptor> corruptedFiles;
// last timestamp used in a descriptor; make sure to not reuse the same timestamp for new descriptors.
private volatile long lastUsedTimestamp;
......@@ -66,7 +66,7 @@ final class HintsStore
dispatchPositions = new ConcurrentHashMap<>();
dispatchDequeue = new ConcurrentLinkedDeque<>(descriptors);
blacklistedFiles = new ConcurrentLinkedQueue<>();
corruptedFiles = new ConcurrentLinkedQueue<>();
//noinspection resource
lastUsedTimestamp = descriptors.stream().mapToLong(d -> d.timestamp).max().orElse(0L);
......@@ -119,7 +119,7 @@ final class HintsStore
delete(descriptor);
}
while ((descriptor = blacklistedFiles.poll()) != null)
while ((descriptor = corruptedFiles.poll()) != null)
{
cleanUp(descriptor);
delete(descriptor);
......@@ -158,9 +158,9 @@ final class HintsStore
dispatchPositions.remove(descriptor);
}
void blacklist(HintsDescriptor descriptor)
void markCorrupted(HintsDescriptor descriptor)
{
blacklistedFiles.add(descriptor);
corruptedFiles.add(descriptor);
}
/*
......
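In the hints code the blacklist(descriptor)/blacklistedFiles pair becomes markCorrupted(descriptor)/corruptedFiles: the dispatcher enqueues the descriptor of a corrupted file, and cleanup later drains the queue, cleaning up and deleting each entry. A generic sketch of that mark-then-drain pattern, with a type parameter standing in for HintsDescriptor and callbacks standing in for cleanUp/delete:

```java
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.function.Consumer;

// Illustrative mark-then-drain pattern; D stands in for HintsDescriptor.
final class CorruptedFilesSketch<D>
{
    private final Queue<D> corruptedFiles = new ConcurrentLinkedQueue<>();

    // Dispatch path: a corrupted file is recorded rather than retried.
    void markCorrupted(D descriptor)
    {
        corruptedFiles.add(descriptor);
    }

    // Cleanup path: drain the queue and dispose of every recorded descriptor.
    void deleteCorrupted(Consumer<D> cleanUp, Consumer<D> delete)
    {
        D descriptor;
        while ((descriptor = corruptedFiles.poll()) != null)
        {
            cleanUp.accept(descriptor);
            delete.accept(descriptor);
        }
    }
}
```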
......@@ -1687,7 +1687,7 @@ public abstract class SSTableReader extends SSTable implements SelfRefCounted<SS
public void markSuspect()
{
if (logger.isTraceEnabled())
logger.trace("Marking {} as a suspect for blacklisting.", getFilename());
logger.trace("Marking {} as a suspect to be excluded from reads.", getFilename());
isSuspect.getAndSet(true);
}
......
......@@ -24,7 +24,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.BlacklistedDirectories;
import org.apache.cassandra.db.DisallowedDirectories;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.io.FSError;
import org.apache.cassandra.io.FSErrorHandler;
......@@ -66,10 +66,10 @@ public class DefaultFSErrorHandler implements FSErrorHandler
break;
case best_effort:
// for both read and write errors mark the path as unwritable.
BlacklistedDirectories.maybeMarkUnwritable(e.path);
DisallowedDirectories.maybeMarkUnwritable(e.path);
if (e instanceof FSReadError)
{
File directory = BlacklistedDirectories.maybeMarkUnreadable(e.path);
File directory = DisallowedDirectories.maybeMarkUnreadable(e.path);
if (directory != null)
Keyspace.removeUnreadableSSTables(directory);
}
......
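DefaultFSErrorHandler's best_effort branch, shown above with the renamed calls, marks the failing path unwritable for any file-system error and additionally marks it unreadable (evicting its SSTables) when the error was a read error. A condensed sketch with stand-in types for the error and directory policy, not the real FSError/DisallowedDirectories classes:

```java
import java.io.File;

// Condensed sketch of the best_effort branch; the interface is a stand-in policy.
final class BestEffortFsErrorSketch
{
    interface DirectoryPolicy
    {
        File maybeMarkUnwritable(File path);
        File maybeMarkUnreadable(File path);
        void removeUnreadableSSTables(File directory);
    }

    static void handle(File failedPath, boolean isReadError, DirectoryPolicy policy)
    {
        // for both read and write errors mark the path as unwritable
        policy.maybeMarkUnwritable(failedPath);

        if (isReadError)
        {
            File directory = policy.maybeMarkUnreadable(failedPath);
            if (directory != null)
                policy.removeUnreadableSSTables(directory);
        }
    }
}
```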
......@@ -639,13 +639,13 @@ public class Util
for ( ; ; )
{
DataDirectory dir = cfs.getDirectories().getWriteableLocation(1);
BlacklistedDirectories.maybeMarkUnwritable(cfs.getDirectories().getLocationForDisk(dir));
DisallowedDirectories.maybeMarkUnwritable(cfs.getDirectories().getLocationForDisk(dir));
}
}
catch (IOError e)
{
// Expected -- marked all directories as unwritable
}
return () -> BlacklistedDirectories.clearUnwritableUnsafe();
return () -> DisallowedDirectories.clearUnwritableUnsafe();
}
}
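The Util helper above keeps requesting a writeable location and marking it unwritable until getWriteableLocation throws IOError, then returns a Runnable that clears the marks. A usage sketch wrapping that helper in try/finally so the global DisallowedDirectories state is always restored; the name markAllDirectoriesUnwriteable is assumed here, since the hunk shows only the helper's body:

```java
import java.io.IOError;

import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.Directories.DataDirectory;
import org.apache.cassandra.db.DisallowedDirectories;

final class UnwritableDirectoriesUsageSketch
{
    // Assumed helper name; the body mirrors the Util hunk above.
    static Runnable markAllDirectoriesUnwriteable(ColumnFamilyStore cfs)
    {
        try
        {
            for ( ; ; )
            {
                DataDirectory dir = cfs.getDirectories().getWriteableLocation(1);
                DisallowedDirectories.maybeMarkUnwritable(cfs.getDirectories().getLocationForDisk(dir));
            }
        }
        catch (IOError e)
        {
            // expected -- every data directory has been marked unwritable
        }
        return DisallowedDirectories::clearUnwritableUnsafe;
    }

    static void withAllDirectoriesUnwritable(ColumnFamilyStore cfs, Runnable testBody)
    {
        Runnable reset = markAllDirectoriesUnwriteable(cfs);
        try
        {
            testBody.run();   // exercise code that must cope with no writeable data directories
        }
        finally
        {
            reset.run();      // DisallowedDirectories.clearUnwritableUnsafe()
        }
    }
}
```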
......@@ -338,7 +338,7 @@ public class DirectoriesTest
for (DataDirectory dd : Directories.dataDirectories)
{
File file = new File(dd.location, new File(KS, "bad").getPath());
assertTrue(BlacklistedDirectories.isUnwritable(file));
assertTrue(DisallowedDirectories.isUnwritable(file));
}
}
finally
......
......@@ -47,13 +47,13 @@ import org.apache.cassandra.schema.*;
import static org.junit.Assert.assertTrue;
public class BlacklistingCompactionsTest
public class CorruptedSSTablesCompactionsTest
{
private static final Logger logger = LoggerFactory.getLogger(BlacklistingCompactionsTest.class);
private static final Logger logger = LoggerFactory.getLogger(CorruptedSSTablesCompactionsTest.class);
private static Random random;
private static final String KEYSPACE1 = "BlacklistingCompactionsTest";
private static final String KEYSPACE1 = "CorruptedSSTablesCompactionsTest";
private static final String STANDARD_STCS = "Standard_STCS";
private static final String STANDARD_LCS = "Standard_LCS";
private static int maxValueSize;
......@@ -112,18 +112,19 @@ public class BlacklistingCompactionsTest
}
@Test
public void testBlacklistingWithSizeTieredCompactionStrategy() throws Exception
public void testCorruptedSSTablesWithSizeTieredCompactionStrategy() throws Exception
{
testBlacklisting(STANDARD_STCS);
testCorruptedSSTables(STANDARD_STCS);
}
@Test
public void testBlacklistingWithLeveledCompactionStrategy() throws Exception
public void testCorruptedSSTablesWithLeveledCompactionStrategy() throws Exception
{
testBlacklisting(STANDARD_LCS);
testCorruptedSSTables(STANDARD_LCS);
}
private void testBlacklisting(String tableName) throws Exception
public void testCorruptedSSTables(String tableName) throws Exception
{
// this test does enough rows to force multiple block indexes to be used
Keyspace keyspace = Keyspace.open(KEYSPACE1);
......