Skip to content

Service Accounts - New CLI tool for managing file tokens #70454

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 9 commits into from
Mar 17, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -433,6 +433,8 @@ public void test94ElasticsearchNodeExecuteCliNotEsHomeWorkDir() throws Exception
assertThat(result.stdout, containsString("Sets the passwords for reserved users"));
result = sh.run(bin.usersTool + " -h");
assertThat(result.stdout, containsString("Manages elasticsearch file users"));
result = sh.run(bin.serviceTokensTool + " -h");
assertThat(result.stdout, containsString("Manages elasticsearch service account file-tokens"));
};

Platforms.onLinux(action);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -198,6 +198,7 @@ private static void verifyDefaultInstallation(Installation es, Distribution dist
"elasticsearch-sql-cli",
"elasticsearch-syskeygen",
"elasticsearch-users",
"elasticsearch-service-tokens",
"x-pack-env",
"x-pack-security-env",
"x-pack-watcher-env"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -501,6 +501,7 @@ private static void verifyDefaultInstallation(Installation es) {
"elasticsearch-sql-cli",
"elasticsearch-syskeygen",
"elasticsearch-users",
"elasticsearch-service-tokens",
"x-pack-env",
"x-pack-security-env",
"x-pack-watcher-env"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -189,5 +189,6 @@ public class Executables {
public final Executable sqlCli = new Executable("elasticsearch-sql-cli");
public final Executable syskeygenTool = new Executable("elasticsearch-syskeygen");
public final Executable usersTool = new Executable("elasticsearch-users");
public final Executable serviceTokensTool = new Executable("elasticsearch-service-tokens");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -220,6 +220,7 @@ private static void verifyDefaultInstallation(Installation es, Distribution dist
"elasticsearch-sql-cli",
"elasticsearch-syskeygen",
"elasticsearch-users",
"elasticsearch-service-tokens",
"x-pack-env",
"x-pack-security-env",
"x-pack-watcher-env"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -162,6 +162,26 @@ private XPackSettings() {
}
}, Property.NodeScope);

// TODO: This setting of hashing algorithm can share code with the one for password when pbkdf2_stretch is the default for both
public static final Setting<String> SERVICE_TOKEN_HASHING_ALGORITHM = new Setting<>(
new Setting.SimpleKey("xpack.security.authc.service_token_hashing.algorithm"),
(s) -> "PBKDF2_STRETCH",
Function.identity(),
v -> {
if (Hasher.getAvailableAlgoStoredHash().contains(v.toLowerCase(Locale.ROOT)) == false) {
throw new IllegalArgumentException("Invalid algorithm: " + v + ". Valid values for password hashing are " +
Hasher.getAvailableAlgoStoredHash().toString());
} else if (v.regionMatches(true, 0, "pbkdf2", 0, "pbkdf2".length())) {
try {
SecretKeyFactory.getInstance("PBKDF2withHMACSHA512");
} catch (NoSuchAlgorithmException e) {
throw new IllegalArgumentException(
"Support for PBKDF2WithHMACSHA512 must be available in order to use any of the " +
"PBKDF2 algorithms for the [xpack.security.authc.service_token_hashing.algorithm] setting.", e);
}
}
}, Property.NodeScope);

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm surprised that we need so much boilerplate in order to add another hashing algorithm. Do we need an issue to refactor them down into some amount of shared code?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think it can be part of #66840. The implication is 8.0 only, but I think it is fine.

public static final List<String> DEFAULT_SUPPORTED_PROTOCOLS;

static {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,27 @@ public void testDefaultSupportedProtocols() {
}
}

public void testServiceTokenHashingAlgorithmSettingValidation() {
final boolean isPBKDF2Available = isSecretkeyFactoryAlgoAvailable("PBKDF2WithHMACSHA512");
final String pbkdf2Algo = randomFrom("PBKDF2_10000", "PBKDF2", "PBKDF2_STRETCH");
final Settings settings = Settings.builder().put(XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM.getKey(), pbkdf2Algo).build();
if (isPBKDF2Available) {
assertEquals(pbkdf2Algo, XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM.get(settings));
} else {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM.get(settings));
assertThat(e.getMessage(), containsString("Support for PBKDF2WithHMACSHA512 must be available"));
}

final String bcryptAlgo = randomFrom("BCRYPT", "BCRYPT11");
assertEquals(bcryptAlgo, XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM.get(
Settings.builder().put(XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM.getKey(), bcryptAlgo).build()));
}

public void testDefaultServiceTokenHashingAlgorithm() {
assertThat(XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM.get(Settings.EMPTY), equalTo("PBKDF2_STRETCH"));
}

private boolean isSecretkeyFactoryAlgoAvailable(String algorithmId) {
try {
SecretKeyFactory.getInstance(algorithmId);
Expand Down
11 changes: 11 additions & 0 deletions x-pack/plugin/security/src/main/bin/elasticsearch-service-tokens
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
#!/bin/bash

# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License
# 2.0; you may not use this file except in compliance with the Elastic License
# 2.0.

# Launcher for the service-account file-token management CLI. Delegates to the
# shared elasticsearch-cli wrapper, sourcing the X-Pack environment scripts,
# and forwards all command-line arguments unchanged.
# Note: use modern $(...) command substitution instead of legacy backticks.
ES_MAIN_CLASS=org.elasticsearch.xpack.security.authc.service.FileTokensTool \
ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \
"$(dirname "$0")"/elasticsearch-cli \
"$@"
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
@echo off

rem Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
rem or more contributor license agreements. Licensed under the Elastic License
rem 2.0; you may not use this file except in compliance with the Elastic License
rem 2.0.

rem Windows launcher for the service-account file-token management CLI.
rem Delegates to the shared elasticsearch-cli.bat with the X-Pack security
rem environment sourced, forwarding all command-line arguments.

setlocal enabledelayedexpansion
setlocal enableextensions

set ES_MAIN_CLASS=org.elasticsearch.xpack.security.authc.service.FileTokensTool
set ES_ADDITIONAL_SOURCES=x-pack-env;x-pack-security-env
rem %%* survives the first parse as a literal %*; "call" triggers a second round
rem of expansion so the original arguments reach elasticsearch-cli.bat. This
rem matches the other X-Pack launcher scripts (e.g. elasticsearch-users.bat).
call "%~dp0elasticsearch-cli.bat" ^
%%* ^
|| goto exit

endlocal
endlocal
:exit
rem propagate the CLI's exit code to the caller
exit /b %ERRORLEVEL%
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@
import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
import org.elasticsearch.xpack.security.authc.service.ServiceAccountService;
import org.elasticsearch.xpack.security.authc.service.ServiceAccountsCredentialStore.CompositeServiceAccountsCredentialStore;
import org.elasticsearch.xpack.security.authc.service.ServiceAccountsTokenStore.CompositeServiceAccountsTokenStore;
import org.elasticsearch.xpack.security.authc.support.SecondaryAuthenticator;
import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore;
import org.elasticsearch.xpack.security.authz.AuthorizationService;
Expand Down Expand Up @@ -492,7 +492,7 @@ Collection<Object> createComponents(Client client, ThreadPool threadPool, Cluste
components.add(apiKeyService);

final ServiceAccountService serviceAccountService =
new ServiceAccountService(new CompositeServiceAccountsCredentialStore(List.of()));
new ServiceAccountService(new CompositeServiceAccountsTokenStore(List.of()));

final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore,
privilegeStore, rolesProviders, threadPool.getThreadContext(), getLicenseState(), fieldPermissionsCache, apiKeyService,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.env.Environment;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.core.XPackPlugin;
Expand All @@ -28,6 +27,7 @@
import org.elasticsearch.xpack.core.security.support.Validation;
import org.elasticsearch.xpack.core.security.support.Validation.Users;
import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.security.support.FileReloadListener;
import org.elasticsearch.xpack.security.support.SecurityFiles;

import java.io.IOException;
Expand Down Expand Up @@ -62,7 +62,7 @@ public FileUserPasswdStore(RealmConfig config, ResourceWatcherService watcherSer
users = parseFileLenient(file, logger, settings);
listeners = new CopyOnWriteArrayList<>(Collections.singletonList(listener));
FileWatcher watcher = new FileWatcher(file.getParent());
watcher.addListener(new FileListener());
watcher.addListener(new FileReloadListener(file, this::tryReload));
try {
watcherService.add(watcher, ResourceWatcherService.Frequency.HIGH);
} catch (IOException e) {
Expand Down Expand Up @@ -179,28 +179,13 @@ void notifyRefresh() {
listeners.forEach(Runnable::run);
}

private class FileListener implements FileChangesListener {
@Override
public void onFileCreated(Path file) {
onFileChanged(file);
}

@Override
public void onFileDeleted(Path file) {
onFileChanged(file);
}

@Override
public void onFileChanged(Path file) {
if (file.equals(FileUserPasswdStore.this.file)) {
final Map<String, char[]> previousUsers = users;
users = parseFileLenient(file, logger, settings);
private void tryReload() {
final Map<String, char[]> previousUsers = users;
users = parseFileLenient(file, logger, settings);

if (Maps.deepEquals(previousUsers, users) == false) {
logger.info("users file [{}] changed. updating users... )", file.toAbsolutePath());
notifyRefresh();
}
}
if (Maps.deepEquals(previousUsers, users) == false) {
logger.info("users file [{}] changed. updating users...", file.toAbsolutePath());
notifyRefresh();
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,8 @@ final class ElasticServiceAccounts {
null
));

static Map<String, ServiceAccount> ACCOUNTS = List.of(FLEET_ACCOUNT).stream()
.collect(Collectors.toMap(a -> a.id().serviceName(), Function.identity()));;
static final Map<String, ServiceAccount> ACCOUNTS = List.of(FLEET_ACCOUNT).stream()
.collect(Collectors.toMap(a -> a.id().asPrincipal(), Function.identity()));;

private ElasticServiceAccounts() {}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.security.authc.service;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.env.Environment;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
import org.elasticsearch.xpack.core.security.support.NoOpLogger;
import org.elasticsearch.xpack.security.support.FileLineParser;
import org.elasticsearch.xpack.security.support.FileReloadListener;
import org.elasticsearch.xpack.security.support.SecurityFiles;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;

/**
 * A {@link ServiceAccountsTokenStore} backed by the {@code service_tokens} file in the
 * config directory. Each line of the file has the form {@code key:hash}. The file is
 * watched for changes and reloaded automatically; registered listeners are notified
 * whenever the parsed content actually changes.
 */
public class FileServiceAccountsTokenStore implements ServiceAccountsTokenStore {

    private static final Logger logger = LogManager.getLogger(FileServiceAccountsTokenStore.class);

    // resolved location of the service_tokens file under the config dir
    private final Path file;
    // callbacks invoked after a reload that changed the token hashes
    private final CopyOnWriteArrayList<Runnable> listeners;
    // immutable snapshot of key -> hashed token, swapped atomically on reload
    private volatile Map<String, char[]> tokenHashes;

    public FileServiceAccountsTokenStore(Environment env, ResourceWatcherService resourceWatcherService) {
        file = resolveFile(env);
        // Initialize listeners BEFORE registering the watcher: the watcher may fire
        // tryReload() on the watcher thread as soon as it is added, and tryReload()
        // dereferences this field via notifyRefresh().
        listeners = new CopyOnWriteArrayList<>();
        FileWatcher watcher = new FileWatcher(file.getParent());
        watcher.addListener(new FileReloadListener(file, this::tryReload));
        try {
            resourceWatcherService.add(watcher, ResourceWatcherService.Frequency.HIGH);
        } catch (IOException e) {
            throw new ElasticsearchException("failed to start watching service_tokens file [{}]", e, file.toAbsolutePath());
        }
        // Initial load is strict: a malformed file prevents startup rather than being ignored.
        try {
            tokenHashes = parseFile(file, logger);
        } catch (IOException e) {
            throw new IllegalStateException("Failed to load service_tokens file [" + file + "]", e);
        }
    }

    @Override
    public boolean authenticate(ServiceAccountToken token) {
        // TODO: placeholder - token verification against the parsed hashes is not implemented yet
        return false;
    }

    /** Registers a callback to run whenever the token file content changes. */
    public void addListener(Runnable listener) {
        listeners.add(listener);
    }

    private void notifyRefresh() {
        listeners.forEach(Runnable::run);
    }

    // Lenient reload triggered by the file watcher: a parse failure keeps the node
    // running with an empty token map rather than throwing on the watcher thread.
    private void tryReload() {
        final Map<String, char[]> previousTokenHashes = tokenHashes;
        tokenHashes = parseFileLenient(file, logger);
        if (false == Maps.deepEquals(tokenHashes, previousTokenHashes)) {
            logger.info("service tokens file [{}] changed. updating ...", file.toAbsolutePath());
            notifyRefresh();
        }
    }

    // package private for testing
    Map<String, char[]> getTokenHashes() {
        return tokenHashes;
    }

    static Path resolveFile(Environment env) {
        return XPackPlugin.resolveConfigFile(env, "service_tokens");
    }

    /**
     * Like {@link #parseFile} but never throws: on any parse failure an empty map is
     * returned so all tokens are (temporarily) dropped.
     */
    static Map<String, char[]> parseFileLenient(Path path, @Nullable Logger logger) {
        // logger is @Nullable - guard before use, and include the cause in the log
        final Logger thisLogger = logger == null ? NoOpLogger.INSTANCE : logger;
        try {
            return parseFile(path, logger);
        } catch (Exception e) {
            thisLogger.error("failed to parse service tokens file [" + path.toAbsolutePath() + "]. skipping/removing all tokens...", e);
            return Map.of();
        }
    }

    /**
     * Parses the service_tokens file into a map of key to hashed token.
     * A missing file yields an empty map. Plaintext (non-hashed) entries are skipped
     * with a warning; duplicated keys keep the last occurrence.
     *
     * @throws IOException if the file cannot be read or a line is missing the ':' separator
     */
    static Map<String, char[]> parseFile(Path path, @Nullable Logger logger) throws IOException {
        final Logger thisLogger = logger == null ? NoOpLogger.INSTANCE : logger;
        thisLogger.trace("reading service_tokens file [{}]...", path.toAbsolutePath());
        if (Files.exists(path) == false) {
            thisLogger.trace("file [{}] does not exist", path.toAbsolutePath());
            return Map.of();
        }
        final Map<String, char[]> parsedTokenHashes = new HashMap<>();
        FileLineParser.parse(path, (lineNumber, line) -> {
            line = line.trim();
            final int colon = line.indexOf(':');
            if (colon == -1) {
                thisLogger.warn("invalid format at line #{} of service_tokens file [{}] - missing ':' character", lineNumber, path);
                throw new IllegalStateException("Missing ':' character at line #" + lineNumber);
            }
            final String key = line.substring(0, colon);
            // TODO: validate against known service accounts?
            // copy the hash portion into a char[] (avoids an extra String of the secret)
            char[] hash = new char[line.length() - (colon + 1)];
            line.getChars(colon + 1, line.length(), hash, 0);
            if (Hasher.resolveFromHash(hash) == Hasher.NOOP) {
                // NOOP hasher means the value is stored in plaintext - refuse to load it
                thisLogger.warn("skipping plaintext service account token for key [{}]", key);
            } else {
                thisLogger.trace("parsed tokens for key [{}]", key);
                final char[] previousHash = parsedTokenHashes.put(key, hash);
                if (previousHash != null) {
                    thisLogger.warn("found duplicated key [{}], earlier entries are overridden", key);
                }
            }
        });
        thisLogger.debug("parsed [{}] tokens from file [{}]", parsedTokenHashes.size(), path.toAbsolutePath());
        return Map.copyOf(parsedTokenHashes);
    }

    /** Atomically rewrites the file with one {@code key:hash} entry per line. */
    static void writeFile(Path path, Map<String, char[]> tokenHashes) {
        SecurityFiles.writeFileAtomically(
            path, tokenHashes, e -> String.format(Locale.ROOT, "%s:%s", e.getKey(), new String(e.getValue())));
    }
}
Loading