[UI Part 3] New implemented HTTP RESTful API of Frontend (#4596)

Use Spring MVC REST to replace the original Netty-based HTTP REST implementation.
Created a new package `org/apache/doris/httpv2`;
the original implementations under `org/apache/doris/http` remain unchanged.

This part of the code will not be used at present, so it will not affect existing functions.

API document can be found in #4584 

Proposal #4308
This commit is contained in:
张家锋
2020-09-16 15:02:59 +08:00
committed by GitHub
parent 1191048f5f
commit 17a8b57018
61 changed files with 7174 additions and 80 deletions

View File

@ -88,7 +88,7 @@ public class PaloFe {
throw new IllegalArgumentException("Java version doesn't match");
}
Log4jConfig.initLogging();
Log4jConfig.initLogging(dorisHomeDir + "/conf/");
// set dns cache ttl
java.security.Security.setProperty("networkaddress.cache.ttl" , "60");

View File

@ -17,7 +17,7 @@
package org.apache.doris.common;
import com.google.common.collect.Maps;
import org.apache.doris.httpv2.config.SpringLog4j2Config;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.LoggerContext;
@ -26,6 +26,8 @@ import org.apache.logging.log4j.core.config.xml.XmlConfiguration;
import org.apache.logging.log4j.core.lookup.Interpolator;
import org.apache.logging.log4j.core.lookup.StrSubstitutor;
import com.google.common.collect.Maps;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Map;
@ -34,84 +36,91 @@ import java.util.Map;
// don't use trace. use INFO, WARN, ERROR, FATAL
//
public class Log4jConfig extends XmlConfiguration {
private static final long serialVersionUID = 1L;
private static String xmlConfTemplate = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
"\n" +
"<Configuration status=\"info\" packages=\"org.apache.doris.common\">\n" +
" <Appenders>\n" +
" <RollingFile name=\"Sys\" fileName=\"${sys_log_dir}/fe.log\" filePattern=\"${sys_log_dir}/fe.log.${sys_file_pattern}-%i\">\n" +
" <PatternLayout charset=\"UTF-8\">\n" +
private static final long serialVersionUID = 1L;
private static String xmlConfTemplate = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
"\n<!-- Auto Generated. DO NOT MODIFY IT! -->\n" +
"<Configuration status=\"info\" packages=\"org.apache.doris.common\">\n" +
" <Appenders>\n" +
" <RollingFile name=\"Sys\" fileName=\"${sys_log_dir}/fe.log\" filePattern=\"${sys_log_dir}/fe.log.${sys_file_pattern}-%i\">\n" +
" <PatternLayout charset=\"UTF-8\">\n" +
" <Pattern>%d{yyyy-MM-dd HH:mm:ss,SSS} %p (%t|%tid) [%C{1}.%M():%L] %m%n</Pattern>\n" +
" </PatternLayout>\n" +
" <Policies>\n" +
" <TimeBasedTriggeringPolicy/>\n" +
" <SizeBasedTriggeringPolicy size=\"${sys_roll_maxsize}MB\"/>\n" +
" </Policies>\n" +
" </PatternLayout>\n" +
" <Policies>\n" +
" <TimeBasedTriggeringPolicy/>\n" +
" <SizeBasedTriggeringPolicy size=\"${sys_roll_maxsize}MB\"/>\n" +
" </Policies>\n" +
" <DefaultRolloverStrategy max=\"${sys_roll_num}\" fileIndex=\"min\">\n" +
" <Delete basePath=\"${sys_log_dir}/\" maxDepth=\"1\">\n" +
" <IfFileName glob=\"fe.log.*\" />\n" +
" <IfLastModified age=\"${sys_log_delete_age}\" />\n" +
" </Delete>\n" +
" </DefaultRolloverStrategy>\n" +
" </RollingFile>\n" +
" <RollingFile name=\"SysWF\" fileName=\"${sys_log_dir}/fe.warn.log\" filePattern=\"${sys_log_dir}/fe.warn.log.${sys_file_pattern}-%i\">\n" +
" <PatternLayout charset=\"UTF-8\">\n" +
" </RollingFile>\n" +
" <RollingFile name=\"SysWF\" fileName=\"${sys_log_dir}/fe.warn.log\" filePattern=\"${sys_log_dir}/fe.warn.log.${sys_file_pattern}-%i\">\n" +
" <PatternLayout charset=\"UTF-8\">\n" +
" <Pattern>%d{yyyy-MM-dd HH:mm:ss,SSS} %p (%t|%tid) [%C{1}.%M():%L] %m%n</Pattern>\n" +
" </PatternLayout>\n" +
" <Policies>\n" +
" <TimeBasedTriggeringPolicy/>\n" +
" <SizeBasedTriggeringPolicy size=\"${sys_roll_maxsize}MB\"/>\n" +
" </Policies>\n" +
" </PatternLayout>\n" +
" <Policies>\n" +
" <TimeBasedTriggeringPolicy/>\n" +
" <SizeBasedTriggeringPolicy size=\"${sys_roll_maxsize}MB\"/>\n" +
" </Policies>\n" +
" <DefaultRolloverStrategy max=\"${sys_roll_num}\" fileIndex=\"min\">\n" +
" <Delete basePath=\"${sys_log_dir}/\" maxDepth=\"1\">\n" +
" <IfFileName glob=\"fe.warn.log.*\" />\n" +
" <IfLastModified age=\"${sys_log_delete_age}\" />\n" +
" </Delete>\n" +
" </DefaultRolloverStrategy>\n" +
" </RollingFile>\n" +
" <RollingFile name=\"Auditfile\" fileName=\"${audit_log_dir}/fe.audit.log\" filePattern=\"${audit_log_dir}/fe.audit.log.${audit_file_pattern}-%i\">\n" +
" <PatternLayout charset=\"UTF-8\">\n" +
" <Pattern>%d{yyyy-MM-dd HH:mm:ss,SSS} [%c{1}] %m%n</Pattern>\n" +
" </PatternLayout>\n" +
" <Policies>\n" +
" <TimeBasedTriggeringPolicy/>\n" +
" <SizeBasedTriggeringPolicy size=\"${audit_roll_maxsize}MB\"/>\n" +
" </Policies>\n" +
" </RollingFile>\n" +
" <RollingFile name=\"Auditfile\" fileName=\"${audit_log_dir}/fe.audit.log\" filePattern=\"${audit_log_dir}/fe.audit.log.${audit_file_pattern}-%i\">\n" +
" <PatternLayout charset=\"UTF-8\">\n" +
" <Pattern>%d{yyyy-MM-dd HH:mm:ss,SSS} [%c{1}] %m%n</Pattern>\n" +
" </PatternLayout>\n" +
" <Policies>\n" +
" <TimeBasedTriggeringPolicy/>\n" +
" <SizeBasedTriggeringPolicy size=\"${audit_roll_maxsize}MB\"/>\n" +
" </Policies>\n" +
" <DefaultRolloverStrategy max=\"${sys_roll_num}\" fileIndex=\"min\">\n" +
" <Delete basePath=\"${audit_log_dir}/\" maxDepth=\"1\">\n" +
" <IfFileName glob=\"fe.audit.log.*\" />\n" +
" <IfLastModified age=\"${audit_log_delete_age}\" />\n" +
" </Delete>\n" +
" </DefaultRolloverStrategy>\n" +
" </RollingFile>\n" +
" </Appenders>\n" +
" <Loggers>\n" +
" <Root level=\"${sys_log_level}\">\n" +
" <AppenderRef ref=\"Sys\"/>\n" +
" <AppenderRef ref=\"SysWF\" level=\"WARN\"/>\n" +
" </Root>\n" +
" <Logger name=\"audit\" level=\"ERROR\" additivity=\"false\">\n" +
" <AppenderRef ref=\"Auditfile\"/>\n" +
" </Logger>\n" +
" <Logger name=\"org.apache.thrift\" level=\"DEBUG\"> \n" +
" <AppenderRef ref=\"Sys\"/>\n" +
" </Logger>\n" +
" <Logger name=\"org.apache.thrift.transport\" level=\"DEBUG\"> \n" +
" <AppenderRef ref=\"Sys\"/>\n" +
" </Logger>\n" +
" <Logger name=\"org.apache.doris.thrift\" level=\"DEBUG\"> \n" +
" <AppenderRef ref=\"Sys\"/>\n" +
" </Logger>\n" +
" <!--REPLACED BY AUDIT AND VERBOSE MODULE NAMES-->\n" +
" </Loggers>\n" +
" </RollingFile>\n" +
" </Appenders>\n" +
" <Loggers>\n" +
" <Root level=\"${sys_log_level}\">\n" +
" <AppenderRef ref=\"Sys\"/>\n" +
" <AppenderRef ref=\"SysWF\" level=\"WARN\"/>\n" +
" </Root>\n" +
" <Logger name=\"audit\" level=\"ERROR\" additivity=\"false\">\n" +
" <AppenderRef ref=\"Auditfile\"/>\n" +
" </Logger>\n" +
" <Logger name=\"org.apache.thrift\" level=\"DEBUG\"> \n" +
" <AppenderRef ref=\"Sys\"/>\n" +
" </Logger>\n" +
" <Logger name=\"org.apache.thrift.transport\" level=\"DEBUG\"> \n" +
" <AppenderRef ref=\"Sys\"/>\n" +
" </Logger>\n" +
" <Logger name=\"org.apache.doris.thrift\" level=\"DEBUG\"> \n" +
" <AppenderRef ref=\"Sys\"/>\n" +
" </Logger>\n" +
" <Logger name=\"org.apache.doris.http\" level=\"DEBUG\"> \n" +
" <AppenderRef ref=\"Sys\"/>\n" +
" </Logger>\n" +
" <!--REPLACED BY AUDIT AND VERBOSE MODULE NAMES-->\n" +
" </Loggers>\n" +
"</Configuration>";
private static StrSubstitutor strSub;
private static String sysLogLevel;
private static String[] verboseModules;
private static String[] auditModules;
// save the generated xml conf template
private static String logXmlConfTemplate;
// dir of fe.conf
public static String confDir;
private static void reconfig() throws IOException {
String newXmlConfTemplate = xmlConfTemplate;
@ -119,14 +128,14 @@ public class Log4jConfig extends XmlConfiguration {
String sysLogDir = Config.sys_log_dir;
String sysRollNum = String.valueOf(Config.sys_log_roll_num);
String sysDeleteAge = String.valueOf(Config.sys_log_delete_age);
if (!(sysLogLevel.equalsIgnoreCase("INFO") ||
if (!(sysLogLevel.equalsIgnoreCase("INFO") ||
sysLogLevel.equalsIgnoreCase("WARN") ||
sysLogLevel.equalsIgnoreCase("ERROR") ||
sysLogLevel.equalsIgnoreCase("ERROR") ||
sysLogLevel.equalsIgnoreCase("FATAL"))) {
throw new IOException("sys_log_level config error");
}
String sysLogRollPattern = "%d{yyyyMMdd}";
String sysRollMaxSize = String.valueOf(Config.log_roll_size_mb);
if (Config.sys_log_roll_interval.equals("HOUR")) {
@ -136,7 +145,7 @@ public class Log4jConfig extends XmlConfiguration {
} else {
throw new IOException("sys_log_roll_interval config error: " + Config.sys_log_roll_interval);
}
// audit log config
String auditLogDir = Config.audit_log_dir;
String auditLogRollPattern = "%d{yyyyMMdd}";
@ -150,7 +159,7 @@ public class Log4jConfig extends XmlConfiguration {
} else {
throw new IOException("audit_log_roll_interval config error: " + Config.audit_log_roll_interval);
}
// verbose modules and audit log modules
StringBuilder sb = new StringBuilder();
for (String s : verboseModules) {
@ -160,8 +169,8 @@ public class Log4jConfig extends XmlConfiguration {
sb.append("<Logger name='audit." + s + "' level='INFO'/>");
}
newXmlConfTemplate = newXmlConfTemplate.replaceAll("<!--REPLACED BY AUDIT AND VERBOSE MODULE NAMES-->",
sb.toString());
sb.toString());
Map<String, String> properties = Maps.newHashMap();
properties.put("sys_log_dir", sysLogDir);
properties.put("sys_file_pattern", sysLogRollPattern);
@ -169,41 +178,47 @@ public class Log4jConfig extends XmlConfiguration {
properties.put("sys_roll_num", sysRollNum);
properties.put("sys_log_delete_age", sysDeleteAge);
properties.put("sys_log_level", sysLogLevel);
properties.put("audit_log_dir", auditLogDir);
properties.put("audit_file_pattern", auditLogRollPattern);
properties.put("audit_roll_maxsize", auditRollMaxSize);
properties.put("audit_roll_num", auditRollNum);
properties.put("audit_log_delete_age", auditDeleteAge);
strSub = new StrSubstitutor(new Interpolator(properties));
newXmlConfTemplate = strSub.replace(newXmlConfTemplate);
System.out.println("=====");
System.out.println(newXmlConfTemplate);
System.out.println("=====");
logXmlConfTemplate = newXmlConfTemplate;
SpringLog4j2Config.writeSpringLogConf(confDir);
// new SimpleLog4jConfiguration with xmlConfTemplate
ByteArrayInputStream bis = new ByteArrayInputStream(newXmlConfTemplate.getBytes("UTF-8"));
ConfigurationSource source = new ConfigurationSource(bis);
Log4jConfig config = new Log4jConfig(source);
// LoggerContext.start(new Configuration)
LoggerContext context = (LoggerContext) LogManager.getContext(false);
context.start(config);
context.start(config);
}
public static String getLogXmlConfTemplate() {
return logXmlConfTemplate;
}
public static class Tuple<X, Y, Z> {
public final X x;
public final Y y;
public final Z z;
public Tuple(X x, Y y, Z z) {
this.x = x;
this.y = y;
public final X x;
public final Y y;
public final Z z;
public Tuple(X x, Y y, Z z) {
this.x = x;
this.y = y;
this.z = z;
}
}
}
}
@Override
public StrSubstitutor getStrSubstitutor() {
return strSub;
@ -212,11 +227,12 @@ public class Log4jConfig extends XmlConfiguration {
public Log4jConfig(final ConfigurationSource configSource) {
super(LoggerContext.getContext(), configSource);
}
public synchronized static void initLogging() throws IOException {
public synchronized static void initLogging(String dorisConfDir) throws IOException {
sysLogLevel = Config.sys_log_level;
verboseModules = Config.sys_log_verbose_modules;
auditModules = Config.audit_log_modules;
confDir = dorisConfDir;
reconfig();
}

View File

@ -0,0 +1,88 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2;
import org.apache.doris.analysis.UserIdentity;
import com.google.common.base.Strings;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.List;
import java.util.concurrent.TimeUnit;
// We simulate a simplified session here: only store user-name of clients who already logged in,
// and we only have a default admin user for now.
/**
 * Simplified in-memory session store for the HTTP server: maps a randomly
 * generated session id to the authenticated user identity and password.
 * Entries expire after {@code SESSION_EXPIRE_TIME} hours of inactivity and
 * the cache is capped at {@code SESSION_MAX_SIZE} entries.
 */
public final class HttpAuthManager {
    private static final Logger LOG = LogManager.getLogger(HttpAuthManager.class);

    // lifetime / capacity bounds of the session cache (now final: never reassigned)
    private static final long SESSION_EXPIRE_TIME = 2; // hour
    private static final long SESSION_MAX_SIZE = 100; // avoid to store too many

    private static HttpAuthManager instance = new HttpAuthManager();

    // What we remember about a logged-in client.
    public static class SessionValue {
        public UserIdentity currentUser;
        public String password;
    }

    // session_id => session value
    private Cache<String, SessionValue> authSessions = CacheBuilder.newBuilder()
            .maximumSize(SESSION_MAX_SIZE)
            .expireAfterAccess(SESSION_EXPIRE_TIME, TimeUnit.HOURS)
            .build();

    private HttpAuthManager() {
        // singleton; use getInstance()
    }

    public static HttpAuthManager getInstance() {
        return instance;
    }

    /**
     * Returns the first live session found among the given candidate ids,
     * or null if none of them is present in the cache.
     */
    public SessionValue getSessionValue(List<String> sessionIds) {
        for (String sessionId : sessionIds) {
            SessionValue sv = authSessions.getIfPresent(sessionId);
            if (sv != null) {
                // sv is provably non-null here; the original's
                // "sv == null ? null : sv.currentUser" guard was redundant.
                LOG.debug("get session value {} by session id: {}, left size: {}",
                        sv.currentUser, sessionId, authSessions.size());
                return sv;
            }
        }
        return null;
    }

    /** Invalidates the given session id; a null/empty id is ignored. */
    public void removeSession(String sessionId) {
        if (!Strings.isNullOrEmpty(sessionId)) {
            authSessions.invalidate(sessionId);
            LOG.debug("remove session id: {}, left size: {}", sessionId, authSessions.size());
        }
    }

    public void addSessionValue(String key, SessionValue value) {
        authSessions.put(key, value);
    }

    public Cache<String, SessionValue> getAuthSessions() {
        return authSessions;
    }
}

View File

@ -0,0 +1,65 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2;
import org.apache.doris.httpv2.config.SpringLog4j2Config;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
import java.util.HashMap;
import java.util.Map;
/**
 * Embedded Spring Boot entry point for the FE HTTP v2 server.
 * Configuration is supplied programmatically via builder properties instead
 * of an application.properties file.
 */
@SpringBootApplication
@EnableConfigurationProperties
@ServletComponentScan
public class HttpServer extends SpringBootServletInitializer {

    private int port;

    public void setPort(int port) {
        this.port = port;
    }

    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder builder) {
        // Used when deployed as a war into an external servlet container.
        return builder.sources(HttpServer.class);
    }

    /**
     * Boots the embedded web server. {@code dorisHome} locates the conf
     * directory that holds the generated log4j2-spring.xml used by
     * "logging.config".
     */
    public void start(String dorisHome) {
        Map<String, Object> serverProps = new HashMap<>();
        serverProps.put("server.port", port);
        serverProps.put("server.servlet.context-path", "/");
        serverProps.put("spring.resources.static-locations", "classpath:/static");
        serverProps.put("spring.http.encoding.charset", "UTF-8");
        serverProps.put("spring.http.encoding.enabled", true);
        serverProps.put("spring.http.encoding.force", true);
        // upload limits for multipart requests
        serverProps.put("spring.servlet.multipart.max-file-size", "100MB");
        serverProps.put("spring.servlet.multipart.max-request-size", "100MB");
        serverProps.put("logging.config", dorisHome + "/conf/" + SpringLog4j2Config.SPRING_LOG_XML_FILE);

        SpringApplicationBuilder builder = new SpringApplicationBuilder();
        builder.sources(HttpServer.class);
        builder.properties(serverProps);
        builder.run(new String[]{});
    }
}

View File

@ -0,0 +1,66 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.config;
import org.apache.doris.common.Log4jConfig;
import org.springframework.beans.BeansException;
import org.springframework.boot.logging.LogFile;
import org.springframework.boot.logging.LoggingInitializationContext;
import org.springframework.boot.logging.LoggingSystem;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.stereotype.Component;
import org.springframework.util.ResourceUtils;
import java.io.File;
/**
 * Re-applies Spring Boot's logging initialization against the generated
 * log4j2-spring.xml under {@code Log4jConfig.confDir}, so the web layer picks
 * up the FE's log configuration after it has been (re)written.
 */
@Component
public class ReadEnvironment implements ApplicationContextAware {
// injected by Spring via setApplicationContext()
private ApplicationContext applicationContext;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
// Rebuilds the logging system from the on-disk xml. Safe to call repeatedly.
public void reinitializeLoggingSystem() {
ConfigurableEnvironment environment = (ConfigurableEnvironment) this.applicationContext.getEnvironment();
// NOTE(review): assumes Log4jConfig.confDir ends with a path separator
// (it is set to dorisHomeDir + "/conf/" by the caller) — confirm.
File file = new File(Log4jConfig.confDir + SpringLog4j2Config.SPRING_LOG_XML_FILE);
String logConfig = file.getAbsolutePath();
LogFile logFile = LogFile.get(environment);
LoggingSystem system = LoggingSystem.get(LoggingSystem.class.getClassLoader());
try {
// Probe that the config file is readable before re-initializing.
ResourceUtils.getURL(logConfig).openStream().close();
// Three step initialization that accounts for the clean up of the logging
// context before initialization. Spring Boot doesn't initialize a logging
// system that hasn't had this sequence applied (since 1.4.1).
system.cleanUp();
system.beforeInitialize();
system.initialize(new LoggingInitializationContext(environment),
logConfig, logFile);
} catch (Exception ex) {
// NOTE(review): errors are only printed, not logged or rethrown — the
// server keeps its previous logging config on failure. Consider a logger.
ex.printStackTrace();
}
}
}

View File

@ -0,0 +1,58 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.config;
import org.apache.doris.common.Log4jConfig;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
public class SpringLog4j2Config {
public static final String SPRING_LOG_XML_FILE = "log4j2-spring.xml";
/**
* write spring boot log4j2-spring.xml file
*/
public static void writeSpringLogConf(String confDir) throws IOException {
Writer writer = null;
try {
// log4j2-spring.xml file path
File file = new File(confDir + SPRING_LOG_XML_FILE);
if (!file.exists()) {
file.createNewFile();
//write file
writer = new FileWriter(file);
writer.write(Log4jConfig.getLogXmlConfTemplate());
} else {
file.deleteOnExit();
file.createNewFile();
//write file
writer = new FileWriter(file);
writer.write(Log4jConfig.getLogXmlConfTemplate());
}
System.out.println("==============================");
} finally {
if (writer != null) {
writer.close();
}
}
}
}

View File

@ -0,0 +1,68 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.config;
import org.apache.doris.httpv2.interceptor.AuthInterceptor;
import org.springframework.boot.web.server.ErrorPage;
import org.springframework.boot.web.server.WebServerFactoryCustomizer;
import org.springframework.boot.web.servlet.server.ConfigurableServletWebServerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpStatus;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
 * Spring MVC configuration for the HTTP v2 server: authentication
 * interceptor, permissive CORS, and a 404 error page forwarded to the
 * front-end index.
 */
@Configuration
public class WebConfigurer implements WebMvcConfigurer {

    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        // Only /rest/v1/** is session-protected; login/logout, the open API
        // endpoints and static resources stay reachable without a session.
        registry.addInterceptor(new AuthInterceptor())
                .addPathPatterns("/rest/v1/**")
                .excludePathPatterns("/", "/api/**", "/rest/v1/login", "/rest/v1/logout", "/static/**", "/metrics")
                .excludePathPatterns("/image","/info","/version","/put","/journal_id","/role","/check","/dump");
    }

    @Override
    public void addCorsMappings(CorsRegistry registry) {
        // Any origin/method/header; credentials are disabled, so the wildcard
        // origin is permitted.
        registry.addMapping("/**")
                .allowCredentials(false)
                .allowedOrigins("*")
                .allowedMethods("*")
                .allowedHeaders("*")
                .maxAge(3600);
    }

    @Override
    public void addViewControllers(ViewControllerRegistry registry) {
        registry.addViewController("/notFound").setViewName("forward:/index.html");
    }

    /** Map 404s onto /notFound so the front-end router can handle them. */
    @Bean
    public WebServerFactoryCustomizer<ConfigurableServletWebServerFactory> containerCustomizer() {
        return new WebServerFactoryCustomizer<ConfigurableServletWebServerFactory>() {
            @Override
            public void customize(ConfigurableServletWebServerFactory factory) {
                factory.addErrorPages(new ErrorPage(HttpStatus.NOT_FOUND, "/notFound"));
            }
        };
    }
}

View File

@ -0,0 +1,296 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.controller;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.base64.Base64;
import io.netty.util.CharsetUtil;
import org.apache.doris.analysis.CompoundPredicate;
import org.apache.doris.analysis.UserIdentity;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.cluster.ClusterNamespace;
import org.apache.doris.common.Config;
import org.apache.doris.httpv2.HttpAuthManager;
import org.apache.doris.httpv2.HttpAuthManager.SessionValue;
import org.apache.doris.httpv2.exception.UnauthorizedException;
import org.apache.doris.mysql.privilege.PaloPrivilege;
import org.apache.doris.mysql.privilege.PrivBitSet;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.service.FrontendOptions;
import org.apache.doris.system.SystemInfoService;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.UUID;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
public class BaseController {
private static final Logger LOG = LogManager.getLogger(BaseController.class);
public static final String PALO_SESSION_ID = "PALO_SESSION_ID";
private static final int PALO_SESSION_EXPIRED_TIME = 3600 * 24; // one day
// We first check cookie, if not admin, we check http's authority header
/**
 * Authenticates the request (cookie first, then Authorization header) and
 * requires global ADMIN or NODE privilege. Throws UnauthorizedException
 * (from the delegated checks) on failure.
 */
public void checkAuthWithCookie(HttpServletRequest request, HttpServletResponse response) {
checkWithCookie(request, response, true);
}
/**
 * Authenticates the request. Tries the session cookie first; if that fails,
 * falls back to the Basic Authorization header, optionally checks global
 * ADMIN/NODE privilege, creates a new session cookie, and binds a
 * ConnectContext to the current thread.
 *
 * @param checkAuth if true, require global ADMIN or NODE privilege
 * @return the resolved authorization info (never null; throws on failure)
 */
public ActionAuthorizationInfo checkWithCookie(HttpServletRequest request, HttpServletResponse response, boolean checkAuth) {
ActionAuthorizationInfo authInfo = checkCookie(request, response, checkAuth);
if (authInfo != null) {
return authInfo;
}
// cookie is invalid. check auth info in request
authInfo = getAuthorizationInfo(request);
UserIdentity currentUser = checkPassword(authInfo);
if (checkAuth) {
checkGlobalAuth(currentUser, PrivPredicate.of(PrivBitSet.of(PaloPrivilege.ADMIN_PRIV,
PaloPrivilege.NODE_PRIV), CompoundPredicate.Operator.OR));
}
// Password auth succeeded: start a session so later requests can use the cookie.
SessionValue value = new SessionValue();
value.currentUser = currentUser;
value.password = authInfo.password;
addSession(request, response, value);
// Bind a per-thread connect context for downstream query execution.
ConnectContext ctx = new ConnectContext(null);
ctx.setQualifiedUser(authInfo.fullUserName);
ctx.setRemoteIP(authInfo.remoteIp);
ctx.setCurrentUserIdentity(currentUser);
ctx.setCatalog(Catalog.getCurrentCatalog());
ctx.setCluster(SystemInfoService.DEFAULT_CLUSTER);
ctx.setThreadLocalInfo();
LOG.debug("check auth without cookie success for user: {}, thread: {}",
currentUser, Thread.currentThread().getId());
return authInfo;
}
/**
 * Creates a fresh session: generates a random id, returns it to the client
 * as the PALO_SESSION_ID cookie, and registers it in HttpAuthManager.
 */
protected void addSession(HttpServletRequest request, HttpServletResponse response, SessionValue value) {
    String sessionId = UUID.randomUUID().toString();
    Cookie sessionCookie = new Cookie(PALO_SESSION_ID, sessionId);
    sessionCookie.setMaxAge(PALO_SESSION_EXPIRED_TIME);
    sessionCookie.setPath("/");
    response.addCookie(sessionCookie);
    LOG.debug("add session cookie: {} {}", PALO_SESSION_ID, sessionId);
    HttpAuthManager.getInstance().addSessionValue(sessionId, value);
}
/**
 * Tries to authenticate via the PALO_SESSION_ID cookie(s).
 * Returns null when no valid session is found or (if checkAuth) the session
 * user lacks global ADMIN/NODE privilege; otherwise refreshes the cookie,
 * binds a ConnectContext to the thread, and returns the auth info.
 */
private ActionAuthorizationInfo checkCookie(HttpServletRequest request, HttpServletResponse response,
boolean checkAuth) {
List<String> sessionIds = getCookieValues(request, PALO_SESSION_ID, response);
if (sessionIds.isEmpty()) {
return null;
}
HttpAuthManager authMgr = HttpAuthManager.getInstance();
SessionValue sessionValue = authMgr.getSessionValue(sessionIds);
if (sessionValue == null) {
return null;
}
if (checkAuth && !Catalog.getCurrentCatalog().getAuth().checkGlobalPriv(sessionValue.currentUser,
PrivPredicate.of(PrivBitSet.of(PaloPrivilege.ADMIN_PRIV,
PaloPrivilege.NODE_PRIV), CompoundPredicate.Operator.OR))) {
// need to check auth and check auth failed
return null;
}
// Valid session: slide the cookie expiration window forward.
updateCookieAge(request, PALO_SESSION_ID, PALO_SESSION_EXPIRED_TIME, response);
// Bind a per-thread connect context for downstream query execution.
ConnectContext ctx = new ConnectContext(null);
ctx.setQualifiedUser(sessionValue.currentUser.getQualifiedUser());
ctx.setRemoteIP(request.getRemoteHost());
ctx.setCurrentUserIdentity(sessionValue.currentUser);
ctx.setCatalog(Catalog.getCurrentCatalog());
ctx.setCluster(SystemInfoService.DEFAULT_CLUSTER);
ctx.setThreadLocalInfo();
LOG.debug("check cookie success for user: {}, thread: {}",
sessionValue.currentUser, Thread.currentThread().getId());
ActionAuthorizationInfo authInfo = new ActionAuthorizationInfo();
authInfo.fullUserName = sessionValue.currentUser.getQualifiedUser();
authInfo.remoteIp = request.getRemoteHost();
authInfo.password = sessionValue.password;
authInfo.cluster = SystemInfoService.DEFAULT_CLUSTER;
return authInfo;
}
/**
 * Collects the values of every request cookie whose name equals cookieName.
 * Returns an empty list when the request carries no cookies.
 */
public List<String> getCookieValues(HttpServletRequest request, String cookieName, HttpServletResponse response) {
    List<String> values = Lists.newArrayList();
    Cookie[] requestCookies = request.getCookies();
    if (requestCookies == null) {
        // getCookies() returns null when the request has no Cookie header.
        return values;
    }
    for (Cookie c : requestCookies) {
        if (c.getName() != null && c.getName().equals(cookieName)) {
            LOG.debug("get cookie value. {}: {}", c.getName(), c.getValue());
            values.add(c.getValue());
        }
    }
    return values;
}
/**
 * Re-sends every cookie named cookieName with a refreshed max-age, sliding
 * its expiration window forward.
 *
 * Fix: request.getCookies() returns null when the request carries no cookies;
 * the original iterated over it unguarded and would throw an NPE. The sibling
 * getCookieValues() already performs this null check.
 */
public void updateCookieAge(HttpServletRequest request, String cookieName, int age, HttpServletResponse response) {
    Cookie[] cookies = request.getCookies();
    if (cookies == null) {
        return;
    }
    for (Cookie cookie : cookies) {
        if (cookie.getName() != null && cookie.getName().equals(cookieName)) {
            cookie.setMaxAge(age);
            response.addCookie(cookie);
            LOG.debug("get update cookie: {} {}", cookie.getName(), cookie.getValue());
        }
    }
}
/**
 * Parsed credentials of one HTTP request: fully-qualified user name,
 * client IP, password and cluster name.
 *
 * NOTE(review): toString() includes the plaintext password and is used in
 * debug logging — confirm that is acceptable before enabling debug logs.
 */
public static class ActionAuthorizationInfo {
    public String fullUserName;
    public String remoteIp;
    public String password;
    public String cluster;

    @Override
    public String toString() {
        // Same output as before, via plain concatenation.
        return "user: " + fullUserName + ", remote ip: " + remoteIp
                + ", password: " + password + ", cluster: " + cluster;
    }
}
/**
 * Verifies the user holds the given global privilege.
 *
 * @throws UnauthorizedException if the privilege check fails
 */
protected void checkGlobalAuth(UserIdentity currentUser, PrivPredicate predicate) throws UnauthorizedException {
if (!Catalog.getCurrentCatalog().getAuth().checkGlobalPriv(currentUser, predicate)) {
throw new UnauthorizedException("Access denied; you need (at least one of) the "
+ predicate.getPrivs().toString() + " privilege(s) for this operation");
}
}
/**
 * Verifies the user holds the given privilege on database {@code db}.
 *
 * @throws UnauthorizedException if the privilege check fails
 */
protected void checkDbAuth(UserIdentity currentUser, String db, PrivPredicate predicate)
throws UnauthorizedException {
if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(currentUser, db, predicate)) {
throw new UnauthorizedException("Access denied; you need (at least one of) the "
+ predicate.getPrivs().toString() + " privilege(s) for this operation");
}
}
/**
 * Verifies the user holds the given privilege on table {@code db}.{@code tbl}.
 *
 * @throws UnauthorizedException if the privilege check fails
 */
protected void checkTblAuth(UserIdentity currentUser, String db, String tbl, PrivPredicate predicate)
throws UnauthorizedException {
if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(currentUser, db, tbl, predicate)) {
throw new UnauthorizedException("Access denied; you need (at least one of) the "
+ predicate.getPrivs().toString() + " privilege(s) for this operation");
}
}
// return currentUserIdentity from Doris auth
/**
 * Validates the plaintext password against Doris auth and resolves the
 * current user identity.
 *
 * @return the single matching UserIdentity
 * @throws UnauthorizedException if user/password/host do not match
 */
protected UserIdentity checkPassword(ActionAuthorizationInfo authInfo)
throws UnauthorizedException {
// checkPlainPassword fills currentUser on success.
List<UserIdentity> currentUser = Lists.newArrayList();
if (!Catalog.getCurrentCatalog().getAuth().checkPlainPassword(authInfo.fullUserName,
authInfo.remoteIp, authInfo.password, currentUser)) {
throw new UnauthorizedException("Access denied for "
+ authInfo.fullUserName + "@" + authInfo.remoteIp);
}
// Exactly one identity is expected for a successful password check.
Preconditions.checkState(currentUser.size() == 1);
return currentUser.get(0);
}
/**
 * Extracts the HTTP Basic auth credentials from the request.
 *
 * @throws UnauthorizedException when the Authorization header is missing or malformed
 */
public ActionAuthorizationInfo getAuthorizationInfo(HttpServletRequest request)
        throws UnauthorizedException {
    ActionAuthorizationInfo authInfo = new ActionAuthorizationInfo();
    boolean parsed = parseAuthInfo(request, authInfo);
    if (!parsed) {
        LOG.info("parse auth info failed, Authorization header {}, url {}",
                request.getHeader("Authorization"), request.getRequestURI());
        throw new UnauthorizedException("Need auth information.");
    }
    LOG.debug("get auth info: {}", authInfo);
    return authInfo;
}
/**
 * Parses an HTTP Basic "Authorization" header into {@code authInfo}.
 *
 * Fixes over the previous version:
 * - a decoded credential with no ':' separator used to crash with
 *   StringIndexOutOfBoundsException; it now cleanly returns false;
 * - malformed base64 used to propagate a decoder exception; it now returns false;
 * - the netty ByteBuf round-trip (copiedBuffer + manual release) is replaced by the
 *   JDK base64 decoder, removing the leak-prone buffer management entirely.
 *
 * @return true when credentials were extracted, false when the header is absent or malformed
 */
private boolean parseAuthInfo(HttpServletRequest request, ActionAuthorizationInfo authInfo) {
    String encodedAuthString = request.getHeader("Authorization");
    if (Strings.isNullOrEmpty(encodedAuthString)) {
        return false;
    }
    String[] parts = encodedAuthString.split(" ");
    if (parts.length != 2) {
        return false;
    }
    String authString;
    try {
        byte[] decoded = java.util.Base64.getDecoder().decode(parts[1]);
        authString = new String(decoded, java.nio.charset.StandardCharsets.UTF_8);
    } catch (IllegalArgumentException e) {
        // Not valid base64 content.
        return false;
    }
    // The authString joins user-name and password with a colon(':').
    // Note that the password itself may contain colons, so only the FIRST colon splits.
    int index = authString.indexOf(":");
    if (index < 0) {
        // No password separator at all: malformed Basic credentials.
        return false;
    }
    authInfo.fullUserName = authString.substring(0, index);
    // "user" -> default cluster; "user@cluster" -> explicit cluster.
    // (String.split never returns null, so no null check is needed.)
    final String[] elements = authInfo.fullUserName.split("@");
    if (elements.length < 2) {
        authInfo.fullUserName = ClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER,
                authInfo.fullUserName);
        authInfo.cluster = SystemInfoService.DEFAULT_CLUSTER;
    } else if (elements.length == 2) {
        authInfo.fullUserName = ClusterNamespace.getFullName(elements[1], elements[0]);
        authInfo.cluster = elements[1];
    }
    authInfo.password = authString.substring(index + 1);
    authInfo.remoteIp = request.getRemoteAddr();
    return true;
}
/**
 * Parses the given request parameter as a base-10 int.
 *
 * @throws NumberFormatException when the string is not a valid integer
 */
protected int checkIntParam(String strParam) {
    return Integer.valueOf(strParam).intValue();
}
/**
 * Parses the given request parameter as a base-10 long.
 *
 * @throws NumberFormatException when the string is not a valid long
 */
protected long checkLongParam(String strParam) {
    return Long.valueOf(strParam).longValue();
}
/**
 * Builds the http base URL of this frontend, e.g. {@code http://host:port}.
 */
protected String getCurrentFrontendURL() {
    return new StringBuilder("http://")
            .append(FrontendOptions.getLocalHostAddress())
            .append(':')
            .append(Config.http_port)
            .toString();
}
}

View File

@ -0,0 +1,64 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.controller;
import org.apache.doris.common.Config;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * REST endpoint exposing all FE configuration items as a two-column
 * (Name/Value) table under {@code GET /rest/v1/config/fe}.
 */
@RestController
@RequestMapping("/rest/v1")
public class ConfigController {

    private static final List<String> CONFIG_TABLE_HEADER = Lists.newArrayList("Name", "Value");

    @RequestMapping(path = "/config/fe", method = RequestMethod.GET)
    public Object variable() {
        Map<String, Object> result = Maps.newHashMap();
        appendConfigureInfo(result);
        return ResponseEntityBuilder.ok(result);
    }

    /**
     * Fills {@code result} with "column_names" (the table header) and "rows",
     * one {Name, Value} map per configuration item.
     */
    private void appendConfigureInfo(Map<String, Object> result) {
        result.put("column_names", CONFIG_TABLE_HEADER);
        List<Map<String, String>> list = Lists.newArrayList();
        result.put("rows", list);
        try {
            // Iterate entries directly instead of keySet() + get(key):
            // avoids one map lookup per config item.
            for (Map.Entry<String, String> entry : Config.dump().entrySet()) {
                Map<String, String> info = new HashMap<>();
                info.put("Name", entry.getKey());
                info.put("Value", entry.getValue());
                list.add(info);
            }
        } catch (Exception e) {
            // NOTE(review): best-effort dump is kept; this class has no logger,
            // so the stack trace goes to stderr as before.
            e.printStackTrace();
        }
    }
}

View File

@ -0,0 +1,198 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.controller;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.common.Config;
import org.apache.doris.ha.HAProtocol;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.persist.Storage;
import org.apache.doris.system.Frontend;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * REST endpoint reporting the HA state of the frontend cluster under
 * {@code GET /rest/v1/ha}: current role, journal id, readability, electable and
 * observer nodes, latest image checkpoint, database ids and frontend membership.
 *
 * Response map keys (including their historical typos) are part of the public
 * HTTP API and must not be renamed.
 */
@RestController
@RequestMapping("/rest/v1")
public class HaController {

    @RequestMapping(path = "/ha", method = RequestMethod.GET)
    public Object ha() {
        Map<String, Object> result = new HashMap<>();
        appendRoleInfo(result);
        appendJournalInfo(result);
        appendCanReadInfo(result);
        appendNodesInfo(result);
        appendImageInfo(result);
        appendDbNames(result);
        appendFe(result);
        appendRemovedFe(result);
        return ResponseEntityBuilder.ok(result);
    }

    // Current role (e.g. MASTER/FOLLOWER/OBSERVER) of this frontend.
    private void appendRoleInfo(Map<String, Object> result) {
        Map<String, Object> info = new HashMap<>();
        List<Map<String, Object>> list = new ArrayList<>();
        info.put("Name", "FrontendRole");
        info.put("Value", Catalog.getCurrentCatalog().getFeType());
        list.add(info);
        result.put("FrontendRole", list);
    }

    // Max journal id on master, replayed journal id otherwise.
    private void appendJournalInfo(Map<String, Object> result) {
        Map<String, Object> info = new HashMap<>();
        List<Map<String, Object>> list = new ArrayList<>();
        // NOTE(review): "Name" is "FrontendRole" in both branches; this looks like a
        // copy-paste slip ("CurrentJournalId" would be expected), but the string is part
        // of the API response, so it is preserved as-is.
        if (Catalog.getCurrentCatalog().isMaster()) {
            info.put("Name", "FrontendRole");
            info.put("Value", Catalog.getCurrentCatalog().getEditLog().getMaxJournalId());
        } else {
            info.put("Name", "FrontendRole");
            info.put("Value", Catalog.getCurrentCatalog().getReplayedJournalId());
        }
        list.add(info);
        result.put("CurrentJournalId", list);
    }

    // Electable and observer nodes as reported by the HA protocol, when available.
    private void appendNodesInfo(Map<String, Object> result) {
        HAProtocol haProtocol = Catalog.getCurrentCatalog().getHaProtocol();
        if (haProtocol == null) {
            return;
        }
        List<InetSocketAddress> electableNodes = haProtocol.getElectableNodes(true);
        if (electableNodes.isEmpty()) {
            return;
        }
        List<Map<String, Object>> eleclist = new ArrayList<>();
        for (InetSocketAddress node : electableNodes) {
            Map<String, Object> info = new HashMap<>();
            info.put("Name", node.getHostName());
            info.put("Value", node.getAddress());
            eleclist.add(info);
        }
        result.put("Electablenodes", eleclist);
        List<InetSocketAddress> observerNodes = haProtocol.getObserverNodes();
        if (observerNodes == null) {
            return;
        }
        List<Map<String, Object>> list = new ArrayList<>();
        for (InetSocketAddress node : observerNodes) {
            Map<String, Object> observer = new HashMap<>();
            observer.put("Name", node.getHostName());
            observer.put("Value", node.getHostString());
            list.add(observer);
        }
        result.put("Observernodes", list);
    }

    // Whether this frontend can currently serve reads.
    private void appendCanReadInfo(Map<String, Object> result) {
        Map<String, Object> canRead = new HashMap<>();
        List<Map<String, Object>> list = new ArrayList<>();
        canRead.put("Name", "Status");
        canRead.put("Value", Catalog.getCurrentCatalog().canRead());
        list.add(canRead);
        result.put("CanRead", list);
    }

    // Version and last-modified time of the latest on-disk image checkpoint.
    private void appendImageInfo(Map<String, Object> result) {
        try {
            List<Map<String, Object>> list = new ArrayList<>();
            Map<String, Object> checkPoint = new HashMap<>();
            Storage storage = new Storage(Config.meta_dir + "/image");
            checkPoint.put("Name", "Version");
            checkPoint.put("Value", storage.getImageSeq());
            list.add(checkPoint);
            long lastCheckpointTime = storage.getCurrentImageFile().lastModified();
            Date date = new Date(lastCheckpointTime);
            Map<String, Object> checkPoint1 = new HashMap<>();
            checkPoint1.put("Name", "lastCheckPointTime");
            checkPoint1.put("Value", date);
            list.add(checkPoint1);
            result.put("CheckpointInfo", list);
        } catch (IOException e) {
            // NOTE(review): best-effort section is kept; this class has no logger,
            // so the stack trace goes to stderr as before.
            e.printStackTrace();
        }
    }

    // Space-separated list of database ids known to the edit log.
    private void appendDbNames(Map<String, Object> result) {
        Map<String, Object> dbs = new HashMap<>();
        List<Long> names = Catalog.getCurrentCatalog().getEditLog().getDatabaseNames();
        if (names == null) {
            return;
        }
        // Use a StringBuilder instead of String concatenation inside the loop
        // (the previous version rebuilt the string on every iteration).
        StringBuilder msg = new StringBuilder();
        for (long name : names) {
            msg.append(name).append(" ");
        }
        List<Map<String, Object>> list = new ArrayList<>();
        dbs.put("Name", "DatabaseNames");
        dbs.put("Value", msg.toString());
        list.add(dbs);
        result.put("databaseNames", list);
    }

    // All frontends currently allowed to join the cluster.
    private void appendFe(Map<String, Object> result) {
        List<Frontend> fes = Catalog.getCurrentCatalog().getFrontends(null /* all */);
        if (fes == null) {
            return;
        }
        List<Map<String, Object>> list = new ArrayList<>();
        for (Frontend fe : fes) {
            Map<String, Object> allowed = new HashMap<>();
            allowed.put("Name", fe.getNodeName());
            allowed.put("Value", fe.toString());
            list.add(allowed);
        }
        result.put("allowedFrontends", list);
    }

    // Names of frontends removed from the cluster.
    private void appendRemovedFe(Map<String, Object> result) {
        List<String> feNames = Catalog.getCurrentCatalog().getRemovedFrontendNames();
        List<Map<String, Object>> list = new ArrayList<>();
        for (String feName : feNames) {
            Map<String, Object> removed = new HashMap<>();
            removed.put("Name", feName);
            removed.put("Value", feName);
            list.add(removed);
        }
        // "removedFronteds" (sic) is a historical typo kept for API compatibility.
        result.put("removedFronteds", list);
    }
}

View File

@ -0,0 +1,271 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.controller;
import oshi.SystemInfo;
import oshi.hardware.CentralProcessor;
import oshi.hardware.GlobalMemory;
import oshi.hardware.HWDiskStore;
import oshi.hardware.HWPartition;
import oshi.hardware.HardwareAbstractionLayer;
import oshi.hardware.NetworkIF;
import oshi.hardware.VirtualMemory;
import oshi.software.os.FileSystem;
import oshi.software.os.NetworkParams;
import oshi.software.os.OSFileStore;
import oshi.software.os.OSProcess;
import oshi.software.os.OperatingSystem;
import oshi.util.FormatUtil;
import oshi.util.Util;
import org.apache.doris.common.Version;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * REST endpoint under {@code GET /rest/v1/hardware_info/fe} that reports the FE build
 * version plus a snapshot of the host hardware (CPU, memory, processes, disks, file
 * systems, network) gathered via the oshi library.
 *
 * The values are pre-rendered strings joined with HTML line breaks and &nbsp; padding,
 * intended for direct display in the web UI.
 */
@RestController
@RequestMapping("/rest/v1")
public class HardwareInfoController {

    @RequestMapping(path = "/hardware_info/fe", method = RequestMethod.GET)
    public Object index() {
        Map<String, Map<String, String>> map = new HashMap<>();
        appendVersionInfo(map);
        appendHardwareInfo(map);
        return ResponseEntityBuilder.ok(map);
    }

    // Doris build version/hash/info/time, taken from the generated Version class.
    private void appendVersionInfo(Map<String, Map<String, String>> content) {
        Map<String, String> map = new HashMap<>();
        map.put("Version", Version.DORIS_BUILD_VERSION);
        map.put("Git", Version.DORIS_BUILD_HASH);
        map.put("BuildInfo", Version.DORIS_BUILD_INFO);
        map.put("BuildTime", Version.DORIS_BUILD_TIME);
        content.put("VersionInfo", map);
    }

    // Collects each hardware section and joins its lines with "<br>" for HTML display.
    // NOTE(review): "HarewareInfo" (sic) is a typo kept because it is part of the API response.
    private void appendHardwareInfo(Map<String, Map<String, String>> content) {
        SystemInfo si = new SystemInfo();
        OperatingSystem os = si.getOperatingSystem();
        HardwareAbstractionLayer hal = si.getHardware();
        CentralProcessor processor = hal.getProcessor();
        GlobalMemory memory = hal.getMemory();
        Map<String, String> map = new HashMap<>();
        map.put("OS", String.join("<br>", getOperatingSystem(os)));
        map.put("Processor", String.join("<br>", getProcessor(processor)));
        map.put("Memory", String.join("<br>", getMemory(memory)));
        map.put("Processes", String.join("<br>", getProcesses(os, memory)));
        map.put("Disk", String.join("<br>", getDisks(hal.getDiskStores())));
        map.put("FileSystem", String.join("<br>", getFileSystem(os.getFileSystem())));
        map.put("NetworkInterface", String.join("<br>", getNetworkInterfaces(hal.getNetworkIFs())));
        map.put("NetworkParameter", String.join("<br>", getNetworkParameters(os.getNetworkParams())));
        content.put("HarewareInfo", map);
    }

    // OS description, boot time, uptime and privilege level.
    private List<String> getOperatingSystem(OperatingSystem os) {
        List<String> osInfo = new ArrayList<>();
        osInfo.add(String.valueOf(os));
        osInfo.add("Booted: " + Instant.ofEpochSecond(os.getSystemBootTime()));
        osInfo.add("Uptime: " + FormatUtil.formatElapsedSecs(os.getSystemUptime()));
        osInfo.add("Running with" + (os.isElevated() ? "" : "out") + " elevated permissions.");
        return osInfo;
    }

    // CPU identity, tick-based utilization over a 1-second sample, load averages and
    // frequencies. NOTE: this method sleeps ~1 second to measure CPU load between ticks.
    private List<String> getProcessor(CentralProcessor processor) {
        List<String> processorInfo = new ArrayList<>();
        processorInfo.add(String.valueOf(processor));
        processorInfo.add(" " + processor.getPhysicalPackageCount() + " physical CPU package(s)");
        processorInfo.add(" " + processor.getPhysicalProcessorCount() + " physical CPU core(s)");
        processorInfo.add(" " + processor.getLogicalProcessorCount() + " logical CPU(s)");
        processorInfo.add("Identifier:&nbsp;&nbsp; " + processor.getIdentifier());
        processorInfo.add("ProcessorID:&nbsp;&nbsp; " + processor.getProcessorID());
        processorInfo.add("Context Switches/Interrupts:&nbsp;&nbsp; " + processor.getContextSwitches()
                + " / " + processor.getInterrupts() + "<br>");
        long[] prevTicks = processor.getSystemCpuLoadTicks();
        long[][] prevProcTicks = processor.getProcessorCpuLoadTicks();
        processorInfo.add("CPU, IOWait, and IRQ ticks @ 0 sec:&nbsp;&nbsp;" + Arrays.toString(prevTicks));
        // Wait a second...
        Util.sleep(1000);
        long[] ticks = processor.getSystemCpuLoadTicks();
        processorInfo.add("CPU, IOWait, and IRQ ticks @ 1 sec:&nbsp;&nbsp;" + Arrays.toString(ticks));
        // Per-category tick deltas over the sampling window.
        long user = ticks[CentralProcessor.TickType.USER.getIndex()] - prevTicks[CentralProcessor.TickType.USER.getIndex()];
        long nice = ticks[CentralProcessor.TickType.NICE.getIndex()] - prevTicks[CentralProcessor.TickType.NICE.getIndex()];
        long sys = ticks[CentralProcessor.TickType.SYSTEM.getIndex()] - prevTicks[CentralProcessor.TickType.SYSTEM.getIndex()];
        long idle = ticks[CentralProcessor.TickType.IDLE.getIndex()] - prevTicks[CentralProcessor.TickType.IDLE.getIndex()];
        long iowait = ticks[CentralProcessor.TickType.IOWAIT.getIndex()] - prevTicks[CentralProcessor.TickType.IOWAIT.getIndex()];
        long irq = ticks[CentralProcessor.TickType.IRQ.getIndex()] - prevTicks[CentralProcessor.TickType.IRQ.getIndex()];
        long softirq = ticks[CentralProcessor.TickType.SOFTIRQ.getIndex()] - prevTicks[CentralProcessor.TickType.SOFTIRQ.getIndex()];
        long steal = ticks[CentralProcessor.TickType.STEAL.getIndex()] - prevTicks[CentralProcessor.TickType.STEAL.getIndex()];
        long totalCpu = user + nice + sys + idle + iowait + irq + softirq + steal;
        processorInfo.add(String.format(
                "User: %.1f%% Nice: %.1f%% System: %.1f%% Idle: %.1f%% IOwait: %.1f%% IRQ: %.1f%% SoftIRQ: %.1f%% Steal: %.1f%%",
                100d * user / totalCpu, 100d * nice / totalCpu, 100d * sys / totalCpu, 100d * idle / totalCpu,
                100d * iowait / totalCpu, 100d * irq / totalCpu, 100d * softirq / totalCpu, 100d * steal / totalCpu));
        processorInfo.add(String.format("CPU load:&nbsp;&nbsp; %.1f%%",
                processor.getSystemCpuLoadBetweenTicks(prevTicks) * 100));
        // Negative load averages mean "not available" on this platform.
        double[] loadAverage = processor.getSystemLoadAverage(3);
        processorInfo.add("CPU load averages:&nbsp;&nbsp;" + (loadAverage[0] < 0 ? " N/A" : String.format(" %.2f", loadAverage[0]))
                + (loadAverage[1] < 0 ? " N/A" : String.format(" %.2f", loadAverage[1]))
                + (loadAverage[2] < 0 ? " N/A" : String.format(" %.2f", loadAverage[2])));
        // per core CPU
        StringBuilder procCpu = new StringBuilder("CPU load per processor:&nbsp;&nbsp;");
        double[] load = processor.getProcessorCpuLoadBetweenTicks(prevProcTicks);
        for (double avg : load) {
            procCpu.append(String.format(" %.1f%%", avg * 100));
        }
        processorInfo.add(procCpu.toString());
        long freq = processor.getVendorFreq();
        if (freq > 0) {
            processorInfo.add("Vendor Frequency:&nbsp;&nbsp; " + FormatUtil.formatHertz(freq));
        }
        freq = processor.getMaxFreq();
        if (freq > 0) {
            processorInfo.add("Max Frequency:&nbsp;&nbsp; " + FormatUtil.formatHertz(freq));
        }
        long[] freqs = processor.getCurrentFreq();
        if (freqs[0] > 0) {
            StringBuilder sb = new StringBuilder("Current Frequencies:&nbsp;&nbsp; ");
            for (int i = 0; i < freqs.length; i++) {
                if (i > 0) {
                    sb.append(", ");
                }
                sb.append(FormatUtil.formatHertz(freqs[i]));
            }
            processorInfo.add(sb.toString());
        }
        return processorInfo;
    }

    // Available/total physical memory and swap usage.
    private List<String> getMemory(GlobalMemory memory) {
        List<String> memoryInfo = new ArrayList<>();
        memoryInfo.add("Memory:&nbsp;&nbsp; " + FormatUtil.formatBytes(memory.getAvailable()) + "/"
                + FormatUtil.formatBytes(memory.getTotal()));
        VirtualMemory vm = memory.getVirtualMemory();
        memoryInfo.add("Swap used:&nbsp;&nbsp; " + FormatUtil.formatBytes(vm.getSwapUsed()) + "/"
                + FormatUtil.formatBytes(vm.getSwapTotal()));
        return memoryInfo;
    }

    // Top 5 processes by CPU usage, ps-style columns.
    private List<String> getProcesses(OperatingSystem os, GlobalMemory memory) {
        List<String> processInfo = new ArrayList<>();
        processInfo.add("Processes:&nbsp;&nbsp; " + os.getProcessCount() + ", Threads:&nbsp;&nbsp; " + os.getThreadCount());
        // Sort by highest CPU
        List<OSProcess> procs = Arrays.asList(os.getProcesses(5, OperatingSystem.ProcessSort.CPU));
        processInfo.add("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; PID %CPU %MEM VSZ RSS Name");
        for (int i = 0; i < procs.size() && i < 5; i++) {
            OSProcess p = procs.get(i);
            processInfo.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; %5d %5.1f %4.1f %9s %9s %s", p.getProcessID(),
                    100d * (p.getKernelTime() + p.getUserTime()) / p.getUpTime(),
                    100d * p.getResidentSetSize() / memory.getTotal(), FormatUtil.formatBytes(p.getVirtualSize()),
                    FormatUtil.formatBytes(p.getResidentSetSize()), p.getName()));
        }
        return processInfo;
    }

    // Physical disks with read/write counters, plus their partitions and mount points.
    private List<String> getDisks(HWDiskStore[] diskStores) {
        List<String> diskInfo = new ArrayList<>();
        diskInfo.add("Disks:&nbsp;&nbsp;");
        for (HWDiskStore disk : diskStores) {
            // "?" is shown when the platform reports no read/write activity counters.
            boolean readwrite = disk.getReads() > 0 || disk.getWrites() > 0;
            diskInfo.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; %s: (model: %s - S/N: %s) size: %s, reads: %s (%s), writes: %s (%s), xfer: %s ms",
                    disk.getName(), disk.getModel(), disk.getSerial(),
                    disk.getSize() > 0 ? FormatUtil.formatBytesDecimal(disk.getSize()) : "?",
                    readwrite ? disk.getReads() : "?", readwrite ? FormatUtil.formatBytes(disk.getReadBytes()) : "?",
                    readwrite ? disk.getWrites() : "?", readwrite ? FormatUtil.formatBytes(disk.getWriteBytes()) : "?",
                    readwrite ? disk.getTransferTime() : "?"));
            HWPartition[] partitions = disk.getPartitions();
            for (HWPartition part : partitions) {
                diskInfo.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; |-- %s: %s (%s) Maj:Min=%d:%d, size: %s%s", part.getIdentification(),
                        part.getName(), part.getType(), part.getMajor(), part.getMinor(),
                        FormatUtil.formatBytesDecimal(part.getSize()),
                        part.getMountPoint().isEmpty() ? "" : " @ " + part.getMountPoint()));
            }
        }
        return diskInfo;
    }

    // File descriptors and per-file-store capacity/inode usage.
    private List<String> getFileSystem(FileSystem fileSystem) {
        List<String> fsInfo = new ArrayList<>();
        fsInfo.add("File System:&nbsp;&nbsp;");
        fsInfo.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;File Descriptors: %d/%d", fileSystem.getOpenFileDescriptors(),
                fileSystem.getMaxFileDescriptors()));
        OSFileStore[] fsArray = fileSystem.getFileStores();
        for (OSFileStore fs : fsArray) {
            long usable = fs.getUsableSpace();
            long total = fs.getTotalSpace();
            fsInfo.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;%s (%s) [%s] %s of %s free (%.1f%%), %s of %s files free (%.1f%%) is %s " +
                            (fs.getLogicalVolume() != null && fs.getLogicalVolume().length() > 0 ? "[%s]" : "%s") +
                            " and is mounted at %s",
                    fs.getName(), fs.getDescription().isEmpty() ? "file system" : fs.getDescription(), fs.getType(),
                    FormatUtil.formatBytes(usable), FormatUtil.formatBytes(fs.getTotalSpace()), 100d * usable / total,
                    FormatUtil.formatValue(fs.getFreeInodes(), ""), FormatUtil.formatValue(fs.getTotalInodes(), ""),
                    100d * fs.getFreeInodes() / fs.getTotalInodes(), fs.getVolume(), fs.getLogicalVolume(),
                    fs.getMount()));
        }
        return fsInfo;
    }

    // Per-NIC addresses, MTU/speed and traffic counters.
    private List<String> getNetworkInterfaces(NetworkIF[] networkIFs) {
        List<String> getNetwork = new ArrayList<>();
        getNetwork.add("Network interfaces:&nbsp;&nbsp;");
        for (NetworkIF net : networkIFs) {
            getNetwork.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;Name: %s (%s)", net.getName(), net.getDisplayName()));
            getNetwork.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;MAC Address: %s", net.getMacaddr()));
            getNetwork.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;MTU: %s, Speed: %s", net.getMTU(), FormatUtil.formatValue(net.getSpeed(), "bps")));
            getNetwork.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;IPv4: %s", Arrays.toString(net.getIPv4addr())));
            getNetwork.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;IPv6: %s", Arrays.toString(net.getIPv6addr())));
            // "?" is shown when the interface reports no traffic counters at all.
            boolean hasData = net.getBytesRecv() > 0 || net.getBytesSent() > 0 || net.getPacketsRecv() > 0
                    || net.getPacketsSent() > 0;
            getNetwork.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Traffic: received %s/%s%s; transmitted %s/%s%s",
                    hasData ? net.getPacketsRecv() + " packets" : "?",
                    hasData ? FormatUtil.formatBytes(net.getBytesRecv()) : "?",
                    hasData ? " (" + net.getInErrors() + " err)" : "",
                    hasData ? net.getPacketsSent() + " packets" : "?",
                    hasData ? FormatUtil.formatBytes(net.getBytesSent()) : "?",
                    hasData ? " (" + net.getOutErrors() + " err)" : ""));
        }
        return getNetwork;
    }

    // Host/domain name, DNS servers and default gateways.
    private List<String> getNetworkParameters(NetworkParams networkParams) {
        List<String> networkParameterInfo = new ArrayList<>();
        networkParameterInfo.add("Network parameters:&nbsp;&nbsp;&nbsp;&nbsp;");
        networkParameterInfo.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Host name: %s", networkParams.getHostName()));
        networkParameterInfo.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; Domain name: %s", networkParams.getDomainName()));
        networkParameterInfo.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; DNS servers: %s", Arrays.toString(networkParams.getDnsServers())));
        networkParameterInfo.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; IPv4 Gateway: %s", networkParams.getIpv4DefaultGateway()));
        networkParameterInfo.add(String.format("&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; IPv6 Gateway: %s", networkParams.getIpv6DefaultGateway()));
        return networkParameterInfo;
    }
}

View File

@ -0,0 +1,154 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.controller;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.qe.HelpModule;
import org.apache.doris.qe.HelpTopic;
import com.google.common.base.Strings;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
/**
 * REST endpoint under {@code GET /rest/v1/help?query=...} that searches the built-in
 * SQL help topics by exact match, fuzzy keyword match and category.
 *
 * Thread-safety fix: the previous version stored the query string in a mutable instance
 * field. Spring controllers are singletons, so concurrent requests raced on that field;
 * the query is now passed explicitly through the private helpers.
 */
@RestController
@RequestMapping("/rest/v1")
public class HelpController {

    @RequestMapping(path = "/help", method = RequestMethod.GET)
    public Object helpSearch(HttpServletRequest request) {
        String queryString = request.getParameter("query");
        if (Strings.isNullOrEmpty(queryString)) {
            // ATTN: according to Mysql protocol, the default query should be "contents"
            // when you want to get server side help.
            queryString = "contents";
        } else {
            queryString = queryString.trim();
        }
        Map<String, Object> result = new HashMap<>();
        appendHelpInfo(result, queryString);
        return ResponseEntityBuilder.ok(result);
    }

    // Runs the three lookup strategies in order; each appends its own keys to result.
    private void appendHelpInfo(Map<String, Object> result, String queryString) {
        appendExactMatchTopic(result, queryString);
        appendFuzzyMatchTopic(result, queryString);
        appendCategories(result, queryString);
    }

    // Exact topic-name lookup.
    private void appendExactMatchTopic(Map<String, Object> result, String queryString) {
        HelpModule module = HelpModule.getInstance();
        HelpTopic topic = module.getTopic(queryString);
        if (topic == null) {
            result.put("matching", "No Exact Matching Topic.");
        } else {
            Map<String, Object> subMap = new HashMap<>();
            appendOneTopicInfo(subMap, topic, "matching");
            result.put("matchingTopic", subMap);
        }
    }

    // Keyword-based fuzzy lookup; expands the topic when exactly one is found.
    private void appendFuzzyMatchTopic(Map<String, Object> result, String queryString) {
        HelpModule module = HelpModule.getInstance();
        List<String> topics = module.listTopicByKeyword(queryString);
        if (topics.isEmpty()) {
            result.put("fuzzy", "No Fuzzy Matching Topic");
        } else if (topics.size() == 1) {
            result.put("fuzzy", "Find only one topic, show you the detail info below");
            Map<String, Object> subMap = new HashMap<>();
            appendOneTopicInfo(subMap, module.getTopic(topics.get(0)), "fuzzy");
            result.put("fuzzyTopic", subMap);
        } else {
            result.put("size", topics.size());
            result.put("datas", topics);
        }
    }

    // Category lookup; expands topics and sub-categories when exactly one category matches.
    private void appendCategories(Map<String, Object> result, String queryString) {
        HelpModule module = HelpModule.getInstance();
        List<String> categories = module.listCategoryByName(queryString);
        if (categories.isEmpty()) {
            result.put("matching", "No Matching Category");
        } else if (categories.size() == 1) {
            result.put("matching", "Find only one category, so show you the detail info below");
            List<String> topics = module.listTopicByCategory(categories.get(0));
            if (topics.size() > 0) {
                List<Map<String, String>> topic_list = new ArrayList<>();
                result.put("topicSize", topics.size());
                for (String topic : topics) {
                    Map<String, String> top = new HashMap<>();
                    top.put("name", topic);
                    topic_list.add(top);
                }
                result.put("topicdatas", topic_list);
            }
            List<String> subCategories = module.listCategoryByCategory(categories.get(0));
            if (subCategories.size() > 0) {
                List<Map<String, String>> subCate = new ArrayList<>();
                result.put("subCateSize", subCategories.size());
                for (String sub : subCategories) {
                    Map<String, String> subMap = new HashMap<>();
                    subMap.put("name", sub);
                    subCate.add(subMap);
                }
                result.put("subdatas", subCate);
            }
        } else {
            List<Map<String, String>> category_list = new ArrayList<>();
            if (categories.size() > 0) {
                result.put("categoriesSize", categories.size());
                for (String cate : categories) {
                    Map<String, String> subMap = new HashMap<>();
                    subMap.put("name", cate);
                    category_list.add(subMap);
                }
                result.put("categoryDatas", category_list);
            }
        }
    }

    // The browser will combine continuous whitespace to one, we use <pre> tag to solve this issue.
    private void appendOneTopicInfo(Map<String, Object> result, HelpTopic topic, String prefix) {
        result.put(prefix + "topic", escapeHtmlInPreTag(topic.getName()));
        result.put(prefix + "description", escapeHtmlInPreTag(topic.getDescription()));
        result.put(prefix + "example", escapeHtmlInPreTag(topic.getExample()));
        result.put(prefix + "Keyword", escapeHtmlInPreTag(topic.getKeywords().toString()));
        result.put(prefix + "Url", escapeHtmlInPreTag(topic.getUrl()));
    }

    /**
     * Replaces newlines with HTML line breaks; returns "" for null input.
     * Uses literal replace (no regex needed, unlike the previous replaceAll).
     * NOTE(review): "</br>" is not a valid HTML tag ("<br>" would be), but the output
     * string is kept as-is because the UI already consumes it.
     */
    protected String escapeHtmlInPreTag(String oriStr) {
        if (oriStr == null) {
            return "";
        }
        return oriStr.replace("\n", "</br>");
    }
}

View File

@ -0,0 +1,151 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.controller;
import org.apache.doris.common.Config;
import org.apache.doris.common.Log4jConfig;
import org.apache.doris.httpv2.config.ReadEnvironment;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
@RestController
@RequestMapping("/rest/v1")
public class LogController {
private static final Logger LOG = LogManager.getLogger(LogController.class);

// Maximum number of trailing log bytes rendered on the page (1MB).
private static final long WEB_LOG_BYTES = 1024 * 1024;

@Autowired
private ReadEnvironment readEnvironment;

/**
 * GET /rest/v1/log — shows the current log configuration and the tail of fe.warn.log.
 */
@RequestMapping(path = "/log", method = RequestMethod.GET)
public Object log(HttpServletRequest request) {
    Map<String, Map<String, String>> map = new HashMap<>();
    // A plain GET never modifies the verbose-module lists.
    appendLogConf(map, null, null);
    appendLogInfo(map);
    return ResponseEntityBuilder.ok(map);
}

/**
 * POST /rest/v1/log?add_verbose=...&del_verbose=... — adds/removes verbose log modules
 * and returns the resulting configuration.
 */
@RequestMapping(path = "/log", method = RequestMethod.POST)
public Object logLevel(HttpServletRequest request) {
    Map<String, Map<String, String>> map = new HashMap<>();
    // Thread-safety fix: the previous version stored these parameters in mutable
    // instance fields of this singleton controller, so concurrent requests raced
    // and a later GET silently re-applied a stale POST's changes. The values are
    // now request-local and passed explicitly.
    String addVerboseName = request.getParameter("add_verbose");
    String delVerboseName = request.getParameter("del_verbose");
    LOG.info("add verbose name: {}, del verbose name: {}", addVerboseName, delVerboseName);
    appendLogConf(map, addVerboseName, delVerboseName);
    return ResponseEntityBuilder.ok(map);
}

/**
 * Applies the optional add/del verbose-module changes and reports the resulting
 * log level, verbose names and audit names under the "LogConfiguration" key.
 * Pass null for both names to only read the current configuration.
 */
private void appendLogConf(Map<String, Map<String, String>> content,
                           String addVerboseName, String delVerboseName) {
    Map<String, String> map = new HashMap<>();
    try {
        // updateLogging(null, null, null) is a read: it returns the current config.
        Log4jConfig.Tuple<String, String[], String[]> configs = Log4jConfig.updateLogging(null, null, null);
        if (!Strings.isNullOrEmpty(addVerboseName)) {
            addVerboseName = addVerboseName.trim();
            List<String> verboseNames = Lists.newArrayList(configs.y);
            if (!verboseNames.contains(addVerboseName)) {
                verboseNames.add(addVerboseName);
                configs = Log4jConfig.updateLogging(null, verboseNames.toArray(new String[verboseNames.size()]),
                        null);
                readEnvironment.reinitializeLoggingSystem();
            }
        }
        if (!Strings.isNullOrEmpty(delVerboseName)) {
            delVerboseName = delVerboseName.trim();
            List<String> verboseNames = Lists.newArrayList(configs.y);
            if (verboseNames.contains(delVerboseName)) {
                verboseNames.remove(delVerboseName);
                configs = Log4jConfig.updateLogging(null, verboseNames.toArray(new String[verboseNames.size()]),
                        null);
                readEnvironment.reinitializeLoggingSystem();
            }
        }
        map.put("Level", configs.x);
        map.put("VerboseNames", StringUtils.join(configs.y, ","));
        map.put("AuditNames", StringUtils.join(configs.z, ","));
        content.put("LogConfiguration", map);
    } catch (IOException e) {
        // Use the class logger instead of printStackTrace so the failure is recorded
        // in the normal log stream.
        LOG.error("failed to update log configuration", e);
    }
}
private void appendLogInfo(Map<String, Map<String, String>> content) {
Map<String, String> map = new HashMap<>();
final String logPath = Config.sys_log_dir + "/fe.warn.log";
map.put("logPath", logPath);
RandomAccessFile raf = null;
try {
raf = new RandomAccessFile(logPath, "r");
long fileSize = raf.length();
long startPos = fileSize < WEB_LOG_BYTES ? 0L : fileSize - WEB_LOG_BYTES;
long webContentLength = fileSize < WEB_LOG_BYTES ? fileSize : WEB_LOG_BYTES;
raf.seek(startPos);
map.put("showingLast", webContentLength + " bytes of log");
StringBuilder sb = new StringBuilder();
String line = "";
sb.append("<pre>");
while ((line = raf.readLine()) != null) {
sb.append(line).append("</br>");
}
sb.append("</pre>");
map.put("log", sb.toString());
} catch (FileNotFoundException e) {
map.put("error", "Couldn't open log file: " + logPath);
} catch (IOException e) {
map.put("error", "Failed to read log file: " + logPath);
} finally {
try {
if (raf != null) {
raf.close();
}
} catch (IOException e) {
LOG.warn("fail to close log file: " + logPath, e);
}
}
content.put("LogContents", map);
}
}

View File

@ -0,0 +1,41 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.HashMap;
import java.util.Map;
@RestController
@RequestMapping("/rest/v1")
public class LoginController extends BaseController {

    /**
     * POST /rest/v1/login
     * Authentication itself is delegated to checkAuthWithCookie(), which is
     * expected to reject the request when credentials are invalid. On success
     * a plain map is returned, so the JSON body is exactly
     * {"code": 200, "msg": "Login success!"}.
     */
    @RequestMapping(path = "/login", method = RequestMethod.POST)
    public Object login(HttpServletRequest request, HttpServletResponse response) {
        checkAuthWithCookie(request, response);
        Map<String, Object> result = new HashMap<>();
        result.put("msg", "Login success!");
        result.put("code", 200);
        return result;
    }
}

View File

@ -0,0 +1,48 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.controller;
import org.apache.doris.httpv2.HttpAuthManager;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@RestController
@RequestMapping("/rest/v1")
public class LogoutController extends BaseController {

    /**
     * POST /rest/v1/logout
     * Invalidates the server-side session bound to the PALO_SESSION_ID cookie,
     * if one is present, and always answers OK.
     * NOTE(review): the handler is named "login" but serves /logout; kept as-is
     * to avoid changing the public method signature — consider renaming later.
     */
    @RequestMapping(path = "/logout", method = RequestMethod.POST)
    public Object login(HttpServletRequest request, HttpServletResponse response) {
        Cookie[] cookies = request.getCookies();
        if (cookies == null) {
            return ResponseEntityBuilder.ok();
        }
        for (Cookie cookie : cookies) {
            // Only the Palo session cookie identifies a login session.
            if (PALO_SESSION_ID.equals(cookie.getName())) {
                HttpAuthManager.getInstance().removeSession(cookie.getValue());
            }
        }
        return ResponseEntityBuilder.ok();
    }
}

View File

@ -0,0 +1,99 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.controller;
import org.apache.doris.common.util.ProfileManager;
import org.apache.doris.httpv2.entity.ResponseBody;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping("/rest/v1")
public class QueryProfileController extends BaseController {
    private static final Logger LOG = LogManager.getLogger(QueryProfileController.class);

    private static final String QUERY_ID = "query_id";

    /**
     * GET /rest/v1/query_profile/{query_id}
     * Returns the stored runtime profile of one query, converted to simple
     * HTML (line breaks and non-breaking spaces) for display in the web UI.
     */
    @RequestMapping(path = "/query_profile/{" + QUERY_ID + "}", method = RequestMethod.GET)
    public Object profile(@PathVariable(value = QUERY_ID) String queryId) {
        String profile = ProfileManager.getInstance().getProfile(queryId);
        if (profile == null) {
            return ResponseEntityBuilder.okWithCommonError("Query " + queryId + " does not exist");
        }
        return ResponseEntityBuilder.ok(
                profile.replaceAll("\n", "</br>").replaceAll(" ", "&nbsp;&nbsp;"));
    }

    /**
     * GET /rest/v1/query_profile
     * Returns a table of all finished queries known to the ProfileManager.
     */
    @RequestMapping(path = "/query_profile", method = RequestMethod.GET)
    public Object query() {
        Map<String, Object> result = Maps.newHashMap();
        addFinishedQueryInfo(result);
        ResponseEntity entity = ResponseEntityBuilder.ok(result);
        ((ResponseBody) entity.getBody()).setCount(result.size());
        return entity;
    }

    /**
     * Fills "column_names", "href_column" and "rows" into the given result map,
     * one row map per finished query, each carrying a "__hrefPaths" link to its
     * profile page.
     */
    private void addFinishedQueryInfo(Map<String, Object> result) {
        List<String> headers = ProfileManager.PROFILE_HEADERS;

        // Locate the 'Query ID' column; fall back to the first column.
        int queryIdIdx = 0;
        for (int col = 0; col < headers.size(); ++col) {
            if (headers.get(col).equals(ProfileManager.QUERY_ID)) {
                queryIdIdx = col;
                break;
            }
        }

        result.put("column_names", headers);
        result.put("href_column", Lists.newArrayList(ProfileManager.QUERY_ID));

        List<Map<String, Object>> rows = Lists.newArrayList();
        result.put("rows", rows);

        for (List<String> query : ProfileManager.getInstance().getAllQueries()) {
            Map<String, Object> rowMap = new HashMap<>();
            for (int col = 0; col < query.size(); ++col) {
                rowMap.put(headers.get(col), query.get(col));
            }
            // Hyperlink each row to its profile page; "-1" marks a missing id.
            String queryId = query.get(queryIdIdx);
            String target = Strings.isNullOrEmpty(queryId) ? "-1" : queryId;
            rowMap.put("__hrefPaths", Lists.newArrayList("/query_profile/" + target));
            rows.add(rowMap);
        }
    }
}

View File

@ -0,0 +1,85 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.controller;
import org.apache.doris.httpv2.entity.ResponseBody;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.service.ExecuteEnv;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping("/rest/v1")
public class SessionController extends BaseController {

    // Column headers of the session table, in display order.
    private static final List<String> SESSION_TABLE_HEADER = Lists.newArrayList(
            "Id", "User", "Host", "Cluster", "Db", "Command", "Time", "State", "Info");

    /**
     * GET /rest/v1/session
     * Returns the list of current client connections as a table.
     */
    @RequestMapping(path = "/session", method = RequestMethod.GET)
    public Object session() {
        Map<String, Object> result = Maps.newHashMap();
        appendSessionInfo(result);
        ResponseEntity entity = ResponseEntityBuilder.ok(result);
        ((ResponseBody) entity.getBody()).setCount(result.size());
        return entity;
    }

    /**
     * Fills "column_names" and "rows" into the result map; each row is a map
     * from column header to the corresponding connection attribute.
     */
    private void appendSessionInfo(Map<String, Object> result) {
        result.put("column_names", SESSION_TABLE_HEADER);

        List<Map<String, String>> rows = Lists.newArrayList();
        result.put("rows", rows);

        long now = System.currentTimeMillis();
        List<ConnectContext.ThreadInfo> threadInfos =
                ExecuteEnv.getInstance().getScheduler().listConnection("root");
        for (ConnectContext.ThreadInfo info : threadInfos) {
            List<String> row = info.toRow(now);
            Map<String, String> record = new HashMap<>();
            for (int i = 0; i < row.size(); i++) {
                record.put(SESSION_TABLE_HEADER.get(i), row.get(i));
            }
            rows.add(record);
        }
    }
}

View File

@ -0,0 +1,199 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.controller;
import org.apache.doris.analysis.RedirectStatus;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.proc.ProcDirInterface;
import org.apache.doris.common.proc.ProcNodeInterface;
import org.apache.doris.common.proc.ProcResult;
import org.apache.doris.common.proc.ProcService;
import org.apache.doris.httpv2.entity.ResponseBody;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.qe.MasterOpExecutor;
import org.apache.doris.qe.OriginStatement;
import org.apache.doris.qe.ShowResultSet;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.commons.validator.routines.UrlValidator;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.servlet.http.HttpServletRequest;
@RestController
@RequestMapping("/rest/v1")
public class SystemController extends BaseController {

    private static final Logger LOG = LogManager.getLogger(SystemController.class);

    /**
     * GET /rest/v1/system?path=/xxx
     * Browses the proc-like system information tree; defaults to the root
     * node when no "path" parameter is supplied.
     */
    @RequestMapping(path = "/system", method = RequestMethod.GET)
    public Object system(HttpServletRequest request) {
        String currentPath = request.getParameter("path");
        if (Strings.isNullOrEmpty(currentPath)) {
            currentPath = "/";
        }
        LOG.debug("get /system request, thread id: {}", Thread.currentThread().getId());
        return appendSystemInfo(currentPath, currentPath);
    }

    /**
     * Opens the proc node at the given path (root when null/empty).
     *
     * @return the node, or null when the path does not exist
     */
    protected ProcNodeInterface getProcNode(String path) {
        ProcService instance = ProcService.getInstance();
        ProcNodeInterface node;
        try {
            if (Strings.isNullOrEmpty(path)) {
                node = instance.open("/");
            } else {
                node = instance.open(path);
            }
        } catch (AnalysisException e) {
            LOG.warn(e.getMessage());
            return null;
        }
        return node;
    }

    /**
     * Builds the table response for the proc node at procPath. On a non-master
     * FE the equivalent SHOW PROC statement is forwarded to the master, because
     * only the master holds the complete metadata.
     *
     * @param procPath path used to open the proc node
     * @param path     path used for building child links and the parent URL
     */
    private ResponseEntity appendSystemInfo(String procPath, String path) {
        UrlValidator validator = new UrlValidator();
        ProcNodeInterface procNode = getProcNode(procPath);
        if (procNode == null) {
            return ResponseEntityBuilder.notFound("No such proc path[" + path + "]");
        }
        // Children of a dir node are themselves browsable, so the first column
        // becomes a hyperlink column.
        boolean isDir = (procNode instanceof ProcDirInterface);

        List<String> columnNames = null;
        List<List<String>> rows = null;
        if (!Catalog.getCurrentCatalog().isMaster()) {
            // forward to master
            String showProcStmt = "SHOW PROC \"" + procPath + "\"";
            MasterOpExecutor masterOpExecutor = new MasterOpExecutor(new OriginStatement(showProcStmt, 0),
                    ConnectContext.get(), RedirectStatus.FORWARD_NO_SYNC);
            try {
                masterOpExecutor.execute();
            } catch (Exception e) {
                LOG.warn("Fail to forward. ", e);
                return ResponseEntityBuilder.internalError("Failed to forward request to master: " + e.getMessage());
            }

            ShowResultSet resultSet = masterOpExecutor.getProxyResultSet();
            if (resultSet == null) {
                return ResponseEntityBuilder.internalError("Failed to get result from master");
            }

            columnNames = resultSet.getMetaData().getColumns().stream().map(c -> c.getName()).collect(
                    Collectors.toList());
            rows = resultSet.getResultRows();
        } else {
            ProcResult result;
            try {
                result = procNode.fetchResult();
            } catch (AnalysisException e) {
                return ResponseEntityBuilder.internalError("The result is null. "
                        + "Maybe it has not been implemented completely[" + e.getMessage() + "], please check. "
                        + "INFO: ProcNode type is [" + procNode.getClass().getName() + "]: "
                        + e.getMessage());
            }

            columnNames = result.getColumnNames();
            rows = result.getRows();
        }

        Preconditions.checkNotNull(columnNames);
        Preconditions.checkNotNull(rows);

        Map<String, Object> result = Maps.newHashMap();
        result.put("column_names", columnNames);
        List<String> hrefColumns = Lists.newArrayList();
        if (isDir) {
            hrefColumns.add(columnNames.get(0));
        }

        List<Map<String, Object>> list = Lists.newArrayList();
        for (List<String> strList : rows) {
            Map<String, Object> rowColumns = new HashMap<>();
            List<String> hrefPaths = Lists.newArrayList();
            for (int i = 0; i < strList.size(); i++) {
                String str = strList.get(i);
                if (isDir && i == 0) {
                    // the first column of dir proc is always a href column;
                    // escape '%' so the child name survives URL decoding
                    String escapeStr = str.replace("%", "%25");
                    String uriPath = "path=" + path + "/" + escapeStr;
                    hrefPaths.add("/rest/v1/system?" + uriPath);
                } else if (validator.isValid(str)) {
                    // if the value is a URL, add it to href columns, and change the content to "URL"
                    hrefPaths.add(str);
                    str = "URL";
                    if (!hrefColumns.contains(columnNames.get(i))) {
                        hrefColumns.add(columnNames.get(i));
                    }
                }
                rowColumns.put(columnNames.get(i), str);
            }
            if (!hrefPaths.isEmpty()) {
                rowColumns.put("__hrefPaths", hrefPaths);
            }
            list.add(rowColumns);
        }
        result.put("rows", list);

        // assemble href column names
        if (!hrefColumns.isEmpty()) {
            result.put("href_columns", hrefColumns);
        }

        // add parent url
        result.put("parent_url", getParentUrl(path));

        ResponseEntity entity = ResponseEntityBuilder.ok(result);
        ((ResponseBody) entity.getBody()).setCount(list.size());
        return entity;
    }

    /**
     * Returns the system URL of the parent proc path, or the bare /system URL
     * when pathStr is already the root.
     */
    private String getParentUrl(String pathStr) {
        Path path = Paths.get(pathStr);
        path = path.getParent();
        if (path == null) {
            return "/rest/v1/system";
        } else {
            return "/rest/v1/system?path=" + path.toString();
        }
    }
}

View File

@ -0,0 +1,100 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.entity;
import org.apache.doris.httpv2.rest.RestApiStatusCode;
/**
 * The response body of restful api.
 * <p>
 * The getter setter methods of all member variables need to be retained
 * to ensure that Spring can perform json format conversion.
 * <p>
 * The fluent setters ({@link #msg}, {@link #code}, {@link #data},
 * {@link #commonError}) return {@code ResponseBody<T>} rather than the raw
 * type, so chained calls keep their type information and compile without
 * unchecked warnings.
 *
 * @param <T> type of data
 */
public class ResponseBody<T> {
    // Used to describe the error message. If there are no errors, it displays "OK"
    private String msg;

    // The user displays an error code.
    // If there is no error, 0 is displayed.
    // If there is an error, it is usually Doris's internal error code, not the HTTP standard error code.
    // The HTTP standard error code should be reflected in the return value of the HTTP protocol.
    private int code = RestApiStatusCode.OK.code;

    // to save the response body
    private T data;

    // to save the number of records in response body.
    // currently not used and always be 0.
    private int count;

    public ResponseBody() {
    }

    /** Fluent setter for the message; returns this for chaining. */
    public ResponseBody<T> msg(String msg) {
        this.msg = msg;
        return this;
    }

    /** Fluent setter for the Doris-internal status code; returns this for chaining. */
    public ResponseBody<T> code(RestApiStatusCode code) {
        this.code = code.code;
        return this;
    }

    /** Fluent setter for the payload; returns this for chaining. */
    public ResponseBody<T> data(T data) {
        this.data = data;
        return this;
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public int getCode() {
        return code;
    }

    public void setCode(int code) {
        this.code = code;
    }

    public T getData() {
        return data;
    }

    public void setData(T data) {
        this.data = data;
    }

    public void setCount(int count) {
        this.count = count;
    }

    public int getCount() {
        return count;
    }

    /** Marks this response as a common (non-specific) error with the given message. */
    public ResponseBody<T> commonError(String msg) {
        this.code = RestApiStatusCode.COMMON_ERROR.code;
        this.msg = msg;
        return this;
    }
}

View File

@ -0,0 +1,60 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.entity;
import org.apache.doris.httpv2.rest.RestApiStatusCode;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
/**
 * A utility class for creating a ResponseEntity easier.
 * Every factory wraps its argument in the appropriate HTTP status; the
 * ok/okWithCommonError variants additionally wrap it in a {@link ResponseBody}.
 */
public class ResponseEntityBuilder {

    // Utility class: static factories only, no instances.
    private ResponseEntityBuilder() {
    }

    /** 400 Bad Request with the given body. */
    public static ResponseEntity badRequest(Object data) {
        return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(data);
    }

    /** 200 OK whose body carries a Doris-internal COMMON_ERROR code and message. */
    public static ResponseEntity okWithCommonError(String msg) {
        ResponseBody body = new ResponseBody().code(RestApiStatusCode.COMMON_ERROR).commonError(msg);
        return ResponseEntity.status(HttpStatus.OK).body(body);
    }

    /** 200 OK with the given payload wrapped in a success ResponseBody. */
    public static ResponseEntity ok(Object data) {
        ResponseBody body = new ResponseBody().code(RestApiStatusCode.OK).msg("success").data(data);
        return ResponseEntity.status(HttpStatus.OK).body(body);
    }

    /** 200 OK with an empty success ResponseBody. */
    public static ResponseEntity ok() {
        ResponseBody body = new ResponseBody().code(RestApiStatusCode.OK).msg("success");
        return ResponseEntity.status(HttpStatus.OK).body(body);
    }

    /** 401 Unauthorized with the given body. */
    public static ResponseEntity unauthorized(Object data) {
        return ResponseEntity.status(HttpStatus.UNAUTHORIZED).body(data);
    }

    /** 500 Internal Server Error with the given body. */
    public static ResponseEntity internalError(Object data) {
        return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(data);
    }

    /** 404 Not Found with the given body. */
    public static ResponseEntity notFound(Object data) {
        return ResponseEntity.status(HttpStatus.NOT_FOUND).body(data);
    }
}

View File

@ -0,0 +1,24 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.exception;
/**
 * Thrown when a restful request is malformed (bad or missing parameters);
 * mapped to HTTP 400 by the RestApiExceptionHandler.
 */
public class BadRequestException extends RuntimeException {
    public BadRequestException(String msg) {
        super(msg);
    }

    /**
     * Same as {@link #BadRequestException(String)} but preserves the
     * underlying cause for diagnostics.
     */
    public BadRequestException(String msg, Throwable cause) {
        super(msg, cause);
    }
}

View File

@ -0,0 +1,64 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.exception;
import org.apache.doris.common.UserException;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
/**
 * A handler to handle all exceptions of restful api.
 * Each handler converts the exception into the corresponding HTTP response:
 * UnauthorizedException -> 401, BadRequestException -> 400,
 * UserException -> 200 with the message as payload, anything else -> 500.
 */
@ControllerAdvice
public class RestApiExceptionHandler {
    private static final Logger LOG = LogManager.getLogger(RestApiExceptionHandler.class);

    @ExceptionHandler(UnauthorizedException.class)
    @ResponseBody
    public Object unauthorizedHandler(UnauthorizedException e) {
        // Expected during normal operation (bad credentials), so debug level.
        LOG.debug("unauthorized exception", e);
        return ResponseEntityBuilder.unauthorized(e.getMessage());
    }

    @ExceptionHandler(UserException.class)
    @ResponseBody
    public Object userExceptionHandler(UserException e) {
        // User-level errors are part of the API contract: 200 with the message.
        LOG.debug("user exception", e);
        return ResponseEntityBuilder.ok(e.getMessage());
    }

    @ExceptionHandler(BadRequestException.class)
    @ResponseBody
    public Object badRequestExceptionHandler(BadRequestException e) {
        LOG.debug("bad request exception", e);
        return ResponseEntityBuilder.badRequest(e.getMessage());
    }

    @ExceptionHandler(Exception.class)
    @ResponseBody
    public Object unexpectedExceptionHandler(Exception e) {
        // An unhandled exception type indicates a server-side bug; log it at
        // WARN (not debug) so it is visible with default log settings.
        LOG.warn("unexpected exception", e);
        return ResponseEntityBuilder.internalError(e.getMessage());
    }
}

View File

@ -0,0 +1,24 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.exception;
/**
 * Thrown when a restful request carries missing or invalid credentials;
 * mapped to HTTP 401 by the RestApiExceptionHandler.
 */
public class UnauthorizedException extends RuntimeException {
    public UnauthorizedException(String msg) {
        super(msg);
    }

    /**
     * Same as {@link #UnauthorizedException(String)} but preserves the
     * underlying cause for diagnostics.
     */
    public UnauthorizedException(String msg, Throwable cause) {
        super(msg, cause);
    }
}

View File

@ -0,0 +1,66 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.interceptor;
import org.apache.doris.httpv2.controller.BaseController;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.JSONObject;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.HandlerInterceptor;
import org.springframework.web.servlet.ModelAndView;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Authenticates every incoming request before it reaches a controller.
 * CORS preflight (OPTIONS) requests are answered with 204 No Content and no
 * auth check; all other requests go through checkAuthWithCookie().
 */
public class AuthInterceptor extends BaseController implements HandlerInterceptor {
    private static final Logger LOG = LogManager.getLogger(AuthInterceptor.class);

    @Override
    public boolean preHandle(HttpServletRequest request,
                             HttpServletResponse response, Object handler) throws Exception {
        LOG.debug("get prehandle. thread: {}", Thread.currentThread().getId());
        String method = request.getMethod();
        if (method.equalsIgnoreCase(RequestMethod.OPTIONS.toString())) {
            // Browsers send preflight requests without credentials; let them pass.
            response.setStatus(HttpStatus.NO_CONTENT.value());
            return true;
        }
        // Throws on failure, so reaching the return means the request is authenticated.
        checkAuthWithCookie(request, response);
        return true;
    }

    @Override
    public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler,
                           ModelAndView modelAndView) throws Exception {
        // no post-processing needed
    }

    @Override
    public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler,
                                Exception ex) throws Exception {
        // no cleanup needed
    }
}

View File

@ -0,0 +1,168 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.meta;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.ColocateGroupSchema;
import org.apache.doris.catalog.ColocateTableIndex;
import org.apache.doris.catalog.ColocateTableIndex.GroupId;
import org.apache.doris.common.DdlException;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.httpv2.rest.RestBaseController;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.persist.ColocatePersistInfo;
import org.apache.doris.qe.ConnectContext;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.view.RedirectView;
import com.google.common.base.Preconditions;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.lang.reflect.Type;
import java.util.List;
/*
 * The colocate meta is defined in {@link ColocateTableIndex}.
 * The actions in ColocateMetaService are for modifying or showing colocate group info manually.
*
* ColocateMetaAction:
* get all information in ColocateTableIndex, as a json string
* eg:
* GET /api/colocate
* return:
* {"colocate_meta":{"groupName2Id":{...},"group2Tables":{}, ...},"status":"OK"}
*
* eg:
* POST /api/colocate/group_stable?db_id=123&group_id=456 (mark group[123.456] as unstable)
* DELETE /api/colocate/group_stable?db_id=123&group_id=456 (mark group[123.456] as stable)
*
* BucketSeqAction:
* change the backends per bucket sequence of a group
* eg:
* POST /api/colocate/bucketseq?db_id=123&group_id=456
*/
@RestController
public class ColocateMetaService extends RestBaseController {
    private static final Logger LOG = LogManager.getLogger(ColocateMetaService.class);
    private static final String GROUP_ID = "group_id";
    private static final String DB_ID = "db_id";

    private static final ColocateTableIndex colocateIndex = Catalog.getCurrentColocateIndex();

    /**
     * Parses and validates the "db_id" and "group_id" request parameters.
     *
     * @return the GroupId identified by the two parameters
     * @throws DdlException if a parameter is missing or malformed, or the group does not exist
     */
    private static GroupId checkAndGetGroupId(HttpServletRequest request) throws DdlException {
        String grpIdStr = request.getParameter(GROUP_ID);
        String dbIdStr = request.getParameter(DB_ID);
        // Previously a missing parameter caused a NullPointerException and a bad one
        // a raw NumberFormatException, both surfacing as HTTP 500.
        if (grpIdStr == null || dbIdStr == null) {
            throw new DdlException("Missing parameter. Need both " + DB_ID + " and " + GROUP_ID);
        }
        long grpId;
        long dbId;
        try {
            grpId = Long.parseLong(grpIdStr.trim());
            dbId = Long.parseLong(dbIdStr.trim());
        } catch (NumberFormatException e) {
            throw new DdlException("Invalid " + DB_ID + " or " + GROUP_ID + ": " + e.getMessage());
        }

        GroupId groupId = new GroupId(dbId, grpId);
        if (!colocateIndex.isGroupExist(groupId)) {
            throw new DdlException("the group " + groupId + " does not exist");
        }
        return groupId;
    }

    /**
     * Common pre-check for all colocate endpoints: verifies the password, redirects
     * non-master FEs to the master, and requires ADMIN privilege.
     *
     * @return a RedirectView to the master FE, or null if this FE is the master
     */
    public Object executeWithoutPassword(HttpServletRequest request, HttpServletResponse response)
            throws DdlException {
        executeCheckPassword(request, response);
        RedirectView redirectView = redirectToMaster(request, response);
        if (redirectView != null) {
            return redirectView;
        }
        // NOTE(review): callers below ignore the returned RedirectView — confirm that
        // redirectToMaster() also handles the redirect through the response itself.
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN);
        return null;
    }

    /**
     * GET /api/colocate
     * Returns all information in ColocateTableIndex as a json string.
     */
    @RequestMapping(path = "/api/colocate", method = RequestMethod.GET)
    public Object colocate(HttpServletRequest request, HttpServletResponse response) throws DdlException {
        executeWithoutPassword(request, response);
        return ResponseEntityBuilder.ok(Catalog.getCurrentColocateIndex());
    }

    /**
     * POST /api/colocate/group_stable marks the group unstable;
     * DELETE marks it stable. Other methods are not mapped.
     */
    @RequestMapping(path = "/api/colocate/group_stable", method = {RequestMethod.POST, RequestMethod.DELETE})
    public Object group_stable(HttpServletRequest request, HttpServletResponse response)
            throws DdlException {
        executeWithoutPassword(request, response);
        GroupId groupId = checkAndGetGroupId(request);

        String method = request.getMethod();
        if ("POST".equalsIgnoreCase(method)) {
            colocateIndex.markGroupUnstable(groupId, true);
        } else if ("DELETE".equalsIgnoreCase(method)) {
            colocateIndex.markGroupStable(groupId, true);
        }

        return ResponseEntityBuilder.ok();
    }

    /**
     * POST /api/colocate/bucketseq?db_id=xxx&group_id=yyy
     * Replaces the backends-per-bucket sequence of a colocate group. The request
     * body is a json List of Lists of backend ids, one inner list per bucket.
     */
    @RequestMapping(path = "/api/colocate/bucketseq", method = RequestMethod.POST)
    public Object bucketseq(HttpServletRequest request, HttpServletResponse response, @RequestBody String meta)
            throws DdlException {
        executeWithoutPassword(request, response);
        final String clusterName = ConnectContext.get().getClusterName();
        GroupId groupId = checkAndGetGroupId(request);

        Type type = new TypeToken<List<List<Long>>>() {
        }.getType();
        List<List<Long>> backendsPerBucketSeq = new Gson().fromJson(meta, type);
        if (backendsPerBucketSeq == null) {
            // Gson returns null for an empty body; previously this caused an NPE below.
            return ResponseEntityBuilder.badRequest("Missing request body");
        }
        LOG.info("get buckets sequence: {}", backendsPerBucketSeq);

        ColocateGroupSchema groupSchema = Catalog.getCurrentColocateIndex().getGroupSchema(groupId);
        if (backendsPerBucketSeq.size() != groupSchema.getBucketsNum()) {
            return ResponseEntityBuilder.okWithCommonError("Invalid bucket num. expected: "
                    + groupSchema.getBucketsNum() + ", actual: " + backendsPerBucketSeq.size());
        }

        List<Long> clusterBackendIds = Catalog.getCurrentSystemInfo().getClusterBackendIds(clusterName, true);
        // check the Backend id: every bucket must list exactly replicationNum
        // backends, and all of them must be alive members of this cluster.
        for (List<Long> backendIds : backendsPerBucketSeq) {
            if (backendIds.size() != groupSchema.getReplicationNum()) {
                return ResponseEntityBuilder.okWithCommonError("Invalid backend num per bucket. expected: "
                        + groupSchema.getReplicationNum() + ", actual: " + backendIds.size());
            }
            for (Long beId : backendIds) {
                if (!clusterBackendIds.contains(beId)) {
                    return ResponseEntityBuilder.okWithCommonError("The backend " + beId
                            + " does not exist or not available");
                }
            }
        }

        int bucketsNum = colocateIndex.getBackendsPerBucketSeq(groupId).size();
        Preconditions.checkState(backendsPerBucketSeq.size() == bucketsNum,
                backendsPerBucketSeq.size() + " vs. " + bucketsNum);
        updateBackendPerBucketSeq(groupId, backendsPerBucketSeq);
        LOG.info("the group {} backendsPerBucketSeq meta has been changed to {}", groupId, backendsPerBucketSeq);

        return ResponseEntityBuilder.ok();
    }

    // Applies the new sequence to the in-memory index and persists it in the edit log.
    private void updateBackendPerBucketSeq(GroupId groupId, List<List<Long>> backendsPerBucketSeq) {
        colocateIndex.addBackendsPerBucketSeq(groupId, backendsPerBucketSeq);
        ColocatePersistInfo info = ColocatePersistInfo.createForBackendsPerBucketSeq(groupId, backendsPerBucketSeq);
        Catalog.getCurrentCatalog().getEditLog().logColocateBackendsPerBucketSeq(info);
    }
}

View File

@ -0,0 +1,24 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.meta;
/**
 * Thrown when a meta-service request originates from a host that is not a
 * registered frontend.
 */
public class InvalidClientException extends RuntimeException {
    // RuntimeException is Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    public InvalidClientException(String msg) {
        super(msg);
    }

    /**
     * @param cause the underlying cause, preserved for diagnostics
     */
    public InvalidClientException(String msg, Throwable cause) {
        super(msg, cause);
    }
}

View File

@ -0,0 +1,25 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.meta;
/**
 * Holds the header/parameter names shared by the meta-service actions
 * (e.g. the /check endpoint in MetaService).
 */
public class MetaBaseAction {
    // Header name used when serving meta files as attachments.
    // Made final: a mutable static "constant" invites accidental reassignment.
    private static final String CONTENT_DISPOSITION = "Content-disposition";

    public static final String CLUSTER_ID = "cluster_id";
    public static final String TOKEN = "token";
}

View File

@ -0,0 +1,245 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.meta;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.common.Config;
import org.apache.doris.common.DdlException;
import org.apache.doris.ha.FrontendNodeType;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.httpv2.rest.RestBaseController;
import org.apache.doris.master.MetaHelper;
import org.apache.doris.persist.MetaCleaner;
import org.apache.doris.persist.Storage;
import org.apache.doris.persist.StorageInfo;
import org.apache.doris.system.Frontend;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;
@RestController
public class MetaService extends RestBaseController {
    private static final Logger LOG = LogManager.getLogger(MetaService.class);

    // Timeout (seconds) for downloading an image file from a remote FE.
    private static final int TIMEOUT_SECOND = 10;

    private static final String VERSION = "version";
    private static final String HOST = "host";
    private static final String PORT = "port";

    // Directory where this FE keeps its image files.
    private File imageDir = MetaHelper.getMasterImageDir();

    // A request is considered valid only when its remote host is a registered FE.
    private boolean isFromValidFe(HttpServletRequest request) {
        String clientHost = request.getRemoteHost();
        Frontend fe = Catalog.getCurrentCatalog().getFeByHost(clientHost);
        if (fe == null) {
            LOG.warn("request is not from valid FE. client: {}", clientHost);
            return false;
        }
        return true;
    }

    // Rejects requests that do not originate from a known FE host.
    private void checkFromValidFe(HttpServletRequest request)
            throws InvalidClientException {
        if (!isFromValidFe(request)) {
            throw new InvalidClientException("invalid client host: " + request.getRemoteHost());
        }
    }

    /**
     * GET /image?version=xxx
     * Streams the image file of the given version to the caller.
     */
    @RequestMapping(path = "/image", method = RequestMethod.GET)
    public Object image(HttpServletRequest request, HttpServletResponse response) {
        checkFromValidFe(request);
        String versionStr = request.getParameter(VERSION);
        if (Strings.isNullOrEmpty(versionStr)) {
            return ResponseEntityBuilder.badRequest("Miss version parameter");
        }
        long version = checkLongParam(versionStr);
        if (version < 0) {
            return ResponseEntityBuilder.badRequest("The version number cannot be less than 0");
        }
        File imageFile = Storage.getImageFile(imageDir, version);
        if (!imageFile.exists()) {
            return ResponseEntityBuilder.notFound("image file not found");
        }
        try {
            // Returning null: the file content has already been written to the response.
            writeFileResponse(request, response, imageFile);
            return null;
        } catch (IOException e) {
            return ResponseEntityBuilder.internalError(e.getMessage());
        }
    }

    /**
     * GET /info
     * Returns this FE's storage info (cluster id, image seq, edits seq) as json.
     */
    @RequestMapping(path = "/info", method = RequestMethod.GET)
    public Object info(HttpServletRequest request, HttpServletResponse response) throws DdlException {
        checkFromValidFe(request);
        try {
            Storage currentStorageInfo = new Storage(imageDir.getAbsolutePath());
            StorageInfo storageInfo = new StorageInfo(currentStorageInfo.getClusterID(),
                    currentStorageInfo.getImageSeq(), currentStorageInfo.getEditsSeq());
            return ResponseEntityBuilder.ok(storageInfo);
        } catch (IOException e) {
            return ResponseEntityBuilder.internalError(e.getMessage());
        }
    }

    /**
     * GET /version
     * Streams the VERSION file of the image directory to the caller.
     */
    @RequestMapping(path = "/version", method = RequestMethod.GET)
    public void version(HttpServletRequest request, HttpServletResponse response) throws IOException, DdlException {
        checkFromValidFe(request);
        File versionFile = new File(imageDir, Storage.VERSION_FILE);
        writeFileResponse(request, response, versionFile);
    }

    /**
     * GET /put?version=xxx&port=yyy
     * Pulls the image file of the given version from the requesting FE
     * (http://[remote-host]:[port]/image?version=xxx), stores it locally,
     * then removes outdated local image files.
     */
    @RequestMapping(path = "/put", method = RequestMethod.GET)
    public Object put(HttpServletRequest request, HttpServletResponse response) throws DdlException {
        checkFromValidFe(request);
        String portStr = request.getParameter(PORT);

        // check port to avoid SSRF(Server-Side Request Forgery)
        if (Strings.isNullOrEmpty(portStr)) {
            return ResponseEntityBuilder.badRequest("Port number cannot be empty");
        }
        // NOTE(review): a non-numeric port throws NumberFormatException here —
        // confirm whether that should instead become a bad-request response.
        int port = Integer.parseInt(portStr);
        if (port < 0 || port > 65535) {
            return ResponseEntityBuilder.badRequest("port is invalid. The port number is between 0-65535");
        }

        String versionStr = request.getParameter(VERSION);
        if (Strings.isNullOrEmpty(versionStr)) {
            return ResponseEntityBuilder.badRequest("Miss version parameter");
        }
        checkLongParam(versionStr);

        String machine = request.getRemoteHost();
        String url = "http://" + machine + ":" + port + "/image?version=" + versionStr;
        String filename = Storage.IMAGE + "." + versionStr;
        File dir = new File(Catalog.getCurrentCatalog().getImageDir());
        try {
            // assumes MetaHelper.getRemoteFile()/complete() take ownership of the
            // stream and close it — TODO confirm
            OutputStream out = MetaHelper.getOutputStream(filename, dir);
            MetaHelper.getRemoteFile(url, TIMEOUT_SECOND * 1000, out);
            MetaHelper.complete(filename, dir);
        } catch (FileNotFoundException e) {
            return ResponseEntityBuilder.notFound("file not found.");
        } catch (IOException e) {
            LOG.warn("failed to get remote file. url: {}", url, e);
            return ResponseEntityBuilder.internalError("failed to get remote file: " + e.getMessage());
        }

        // Delete old image files; failures here are logged but do not fail the request.
        try {
            MetaCleaner cleaner = new MetaCleaner(Config.meta_dir + "/image");
            cleaner.clean();
        } catch (Exception e) {
            LOG.error("Follower/Observer delete old image file fail.", e);
        }
        return ResponseEntityBuilder.ok();
    }

    /**
     * GET /journal_id
     * Puts the max replayed journal id of this FE into the "id" response header.
     */
    @RequestMapping(path = "/journal_id", method = RequestMethod.GET)
    public Object journal_id(HttpServletRequest request, HttpServletResponse response) throws DdlException {
        checkFromValidFe(request);
        long id = Catalog.getCurrentCatalog().getReplayedJournalId();
        response.setHeader("id", Long.toString(id));
        return ResponseEntityBuilder.ok();
    }

    /**
     * GET /role?host=xxx&port=yyy
     * Puts the role (and, when known, node name) of the FE at host:port into
     * the response headers. Responds with role UNKNOWN if no such FE exists.
     */
    @RequestMapping(path = "/role", method = RequestMethod.GET)
    public Object role(HttpServletRequest request, HttpServletResponse response) throws DdlException {
        checkFromValidFe(request);
        String host = request.getParameter(HOST);
        String portString = request.getParameter(PORT);
        if (!Strings.isNullOrEmpty(host) && !Strings.isNullOrEmpty(portString)) {
            int port = Integer.parseInt(portString);
            Frontend fe = Catalog.getCurrentCatalog().checkFeExist(host, port);
            if (fe == null) {
                response.setHeader("role", FrontendNodeType.UNKNOWN.name());
            } else {
                response.setHeader("role", fe.getRole().name());
                response.setHeader("name", fe.getNodeName());
            }
            return ResponseEntityBuilder.ok();
        } else {
            return ResponseEntityBuilder.badRequest("Miss parameter");
        }
    }

    /*
     * This action is used to get the electable_nodes config and the cluster id of
     * the fe with the given ip and port. When one frontend start, it should check
     * the local electable_nodes config and local cluster id with other frontends.
     * If there is any difference, local fe will exit. This is designed to protect
     * the consistency of the cluster.
     */
    @RequestMapping(path = "/check", method = RequestMethod.GET)
    public Object check(HttpServletRequest request, HttpServletResponse response) throws DdlException {
        checkFromValidFe(request);
        try {
            Storage storage = new Storage(imageDir.getAbsolutePath());
            response.setHeader(MetaBaseAction.CLUSTER_ID, Integer.toString(storage.getClusterID()));
            response.setHeader(MetaBaseAction.TOKEN, storage.getToken());
        } catch (IOException e) {
            return ResponseEntityBuilder.internalError(e.getMessage());
        }
        return ResponseEntityBuilder.ok();
    }

    /**
     * GET /dump
     * Dumps the whole catalog image to a local file and returns its path.
     * NOTE(review): unlike the other endpoints, this one does not call
     * checkFromValidFe() — confirm whether that is intentional.
     */
    @RequestMapping(value = "/dump", method = RequestMethod.GET)
    public Object dump(HttpServletRequest request, HttpServletResponse response) throws DdlException {
        /*
         * Before dump, we acquired the catalog read lock and all databases' read lock and all
         * the jobs' read lock. This will guarantee the consistency of database and job queues.
         * But Backend may still inconsistent.
         *
         * TODO: Still need to lock ClusterInfoService to prevent add or drop Backends
         */
        String dumpFilePath = Catalog.getCurrentCatalog().dumpImage();
        if (dumpFilePath == null) {
            return ResponseEntityBuilder.okWithCommonError("dump failed.");
        }

        Map<String, String> res = Maps.newHashMap();
        res.put("dumpFilePath", dumpFilePath);
        return ResponseEntityBuilder.ok(res);
    }
}

View File

@ -0,0 +1,135 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.common.Config;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import com.google.common.base.Strings;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Api for checking the whether the FE has been started successfully.
* Response
* {
* "msg": "OK",
* "code": 0,
* "data": {
* "queryPort": 9030,
* "rpcPort": 9020,
* "maxReplayedJournal": 17287
* },
* "count": 0
* }
*/
@RestController
public class BootstrapFinishAction {
    private static final String CLUSTER_ID = "cluster_id";
    private static final String TOKEN = "token";

    public static final String REPLAYED_JOURNAL_ID = "replayedJournalId";
    public static final String QUERY_PORT = "queryPort";
    public static final String RPC_PORT = "rpcPort";

    /**
     * GET /api/bootstrap[?cluster_id=xxx&token=yyy]
     * Reports whether this FE has finished starting. When a matching cluster id
     * and token are supplied, the replayed journal id and service ports are
     * included in the response as well.
     */
    @RequestMapping(path = "/api/bootstrap", method = RequestMethod.GET)
    public ResponseEntity execute(HttpServletRequest request, HttpServletResponse response) {
        boolean isReady = Catalog.getCurrentCatalog().isReady();

        // to json response
        BootstrapResult result = new BootstrapResult();
        if (isReady) {
            String clusterIdStr = request.getParameter(CLUSTER_ID);
            String token = request.getParameter(TOKEN);
            if (!Strings.isNullOrEmpty(clusterIdStr) && !Strings.isNullOrEmpty(token)) {
                // cluster id or token is provided, return more info
                int clusterId;
                try {
                    // parseInt avoids the needless boxing of Integer.valueOf
                    clusterId = Integer.parseInt(clusterIdStr);
                } catch (NumberFormatException e) {
                    return ResponseEntityBuilder.badRequest("invalid cluster id format: " + clusterIdStr);
                }

                if (clusterId != Catalog.getCurrentCatalog().getClusterId()) {
                    return ResponseEntityBuilder.okWithCommonError("invalid cluster id: " + clusterId);
                }

                if (!token.equals(Catalog.getCurrentCatalog().getToken())) {
                    return ResponseEntityBuilder.okWithCommonError("invalid token: " + token);
                }

                // cluster id and token are valid, return replayed journal id
                long replayedJournalId = Catalog.getCurrentCatalog().getReplayedJournalId();
                result.setReplayedJournalId(replayedJournalId);
                result.setQueryPort(Config.query_port);
                result.setRpcPort(Config.rpc_port);
            }
            return ResponseEntityBuilder.ok(result);
        }
        return ResponseEntityBuilder.okWithCommonError("not ready");
    }

    /**
     * This class is also for json DeSer, so get/set method must be remained.
     */
    private static class BootstrapResult {
        private long replayedJournalId = 0;
        private int queryPort = 0;
        private int rpcPort = 0;

        public BootstrapResult() {
        }

        public void setReplayedJournalId(long replayedJournalId) {
            this.replayedJournalId = replayedJournalId;
        }

        public long getReplayedJournalId() {
            return replayedJournalId;
        }

        public void setQueryPort(int queryPort) {
            this.queryPort = queryPort;
        }

        public int getQueryPort() {
            return queryPort;
        }

        public void setRpcPort(int rpcPort) {
            this.rpcPort = rpcPort;
        }

        public int getRpcPort() {
            return rpcPort;
        }
    }
}

View File

@ -0,0 +1,87 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Database;
import org.apache.doris.common.UserException;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.httpv2.exception.UnauthorizedException;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import com.google.common.base.Strings;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.view.RedirectView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* To cancel a load transaction with given load label
*/
@RestController
public class CancelLoadAction extends RestBaseController {

    /**
     * POST /api/{db}/_cancel?label=xxx
     * Aborts the load transaction identified by the given label in the given database.
     */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/_cancel", method = RequestMethod.POST)
    public Object execute(@PathVariable(value = DB_KEY) final String dbName,
                          HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);

        // Non-master FEs forward the request to the master.
        RedirectView masterRedirect = redirectToMaster(request, response);
        if (masterRedirect != null) {
            return masterRedirect;
        }

        if (Strings.isNullOrEmpty(dbName)) {
            return ResponseEntityBuilder.badRequest("No database selected");
        }
        String qualifiedDbName = getFullDbName(dbName);

        String loadLabel = request.getParameter(LABEL_KEY);
        if (Strings.isNullOrEmpty(loadLabel)) {
            return ResponseEntityBuilder.badRequest("No label specified");
        }

        Database database = Catalog.getCurrentCatalog().getDb(qualifiedDbName);
        if (database == null) {
            return ResponseEntityBuilder.okWithCommonError("unknown database, database=" + dbName);
        }

        // TODO(cmy): Currently we only check priv in db level.
        // Should check priv in table level.
        if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), qualifiedDbName, PrivPredicate.LOAD)) {
            throw new UnauthorizedException("Access denied for user '" + ConnectContext.get().getQualifiedUser()
                    + "' to database '" + qualifiedDbName + "'");
        }

        try {
            Catalog.getCurrentGlobalTransactionMgr().abortTransaction(database.getId(), loadLabel, "user cancel");
        } catch (UserException e) {
            return ResponseEntityBuilder.okWithCommonError(e.getMessage());
        }
        return ResponseEntityBuilder.ok();
    }
}

View File

@ -0,0 +1,94 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.alter.SystemHandler;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.Pair;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.system.Backend;
import org.apache.doris.system.SystemInfoService;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
import java.util.stream.Collectors;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Check whether the given backends can be safely decommissioned.
* fe_host:fe_http_port/api/check_decommission?host_ports=host:port,host2:port2...
* return:
* {
* "msg": "OK",
* "code": 0,
* "data": ["192.168.10.11:9050", "192.168.10.11:9050"],
* "count": 0
* }
*/
@RestController
public class CheckDecommissionAction extends RestBaseController {
    public static final String HOST_PORTS = "host_ports";

    /**
     * GET /api/check_decommission?host_ports=host1:port1,host2:port2...
     * Returns the list of backends that may be decommissioned, as "host:heartbeatPort".
     */
    @RequestMapping(path = "/api/check_decommission", method = RequestMethod.GET)
    public Object execute(HttpServletRequest request, HttpServletResponse response) {
        // check user auth
        executeCheckPassword(request, response);
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.OPERATOR);

        String hostPortsParam = request.getParameter(HOST_PORTS);
        if (Strings.isNullOrEmpty(hostPortsParam)) {
            return ResponseEntityBuilder.badRequest("No host:port specified");
        }

        String[] hostPortTokens = hostPortsParam.split(",");
        if (hostPortTokens.length == 0) {
            return ResponseEntityBuilder.badRequest("No host:port specified");
        }

        // Validate every host:port token before doing any real work.
        List<Pair<String, Integer>> hostPortPairs = Lists.newArrayList();
        for (String token : hostPortTokens) {
            try {
                hostPortPairs.add(SystemInfoService.validateHostAndPort(token));
            } catch (AnalysisException e) {
                return ResponseEntityBuilder.badRequest(e.getMessage());
            }
        }

        try {
            List<Backend> backends = SystemHandler.checkDecommission(hostPortPairs);
            List<String> backendAddrs = Lists.newArrayList();
            for (Backend backend : backends) {
                backendAddrs.add(backend.getHost() + ":" + backend.getHeartbeatPort());
            }
            return ResponseEntityBuilder.ok(backendAddrs);
        } catch (DdlException e) {
            return ResponseEntityBuilder.okWithCommonError(e.getMessage());
        }
    }
}

View File

@ -0,0 +1,85 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.common.util.DebugUtil;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.service.ExecuteEnv;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* This class is used to get current query_id of connection_id.
* Every connection holds at most one query at every point.
* So we can get query_id firstly, and get query by query_id.
*
* {
* "msg": "OK",
* "code": 0,
* "data": {
* "query_id": "b52513ce3f0841ca-9cb4a96a268f2dba"
* },
* "count": 0
* }
*/
@RestController
public class ConnectionAction extends RestBaseController {
    private static final Logger LOG = LogManager.getLogger(ConnectionAction.class);

    /**
     * GET /api/connection?connection_id=xxx
     * Returns the id of the query currently running on the given connection.
     * Requires ADMIN privilege.
     */
    @RequestMapping(path = "/api/connection", method = RequestMethod.GET)
    protected Object connection(HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN);

        String connStr = request.getParameter("connection_id");
        if (Strings.isNullOrEmpty(connStr)) {
            return ResponseEntityBuilder.badRequest("Missing connection_id");
        }

        long connectionId;
        try {
            // parseLong avoids the needless boxing of Long.valueOf
            connectionId = Long.parseLong(connStr.trim());
        } catch (NumberFormatException e) {
            return ResponseEntityBuilder.badRequest("Invalid connection id: " + e.getMessage());
        }

        // Every connection holds at most one query at a time.
        ConnectContext context = ExecuteEnv.getInstance().getScheduler().getContext(connectionId);
        if (context == null || context.queryId() == null) {
            return ResponseEntityBuilder.okWithCommonError("connection id " + connectionId + " not found.");
        }
        String queryId = DebugUtil.printId(context.queryId());

        Map<String, String> result = Maps.newHashMap();
        result.put("query_id", queryId);
        return ResponseEntityBuilder.ok(result);
    }
}

View File

@ -0,0 +1,94 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Database;
import org.apache.doris.catalog.Table;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/*
* used to get a table's ddl stmt
* eg:
* fe_host:http_port/api/_get_ddl?db=xxx&tbl=yyy
*/
@RestController
public class GetDdlStmtAction extends RestBaseController {
    private static final Logger LOG = LogManager.getLogger(GetDdlStmtAction.class);

    /**
     * GET /api/_get_ddl?db=xxx&tbl=yyy
     * Returns the CREATE TABLE, ADD PARTITION and CREATE ROLLUP statements of the
     * given table under the keys "create_table", "create_partition" and
     * "create_rollup". Requires ADMIN privilege.
     */
    @RequestMapping(path = "/api/_get_ddl", method = RequestMethod.GET)
    public Object execute(HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN);

        String dbName = request.getParameter(DB_KEY);
        String tableName = request.getParameter(TABLE_KEY);
        if (Strings.isNullOrEmpty(dbName) || Strings.isNullOrEmpty(tableName)) {
            return ResponseEntityBuilder.badRequest("Missing params. Need database name and Table name");
        }

        String fullDbName = getFullDbName(dbName);
        Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
        if (db == null) {
            return ResponseEntityBuilder.okWithCommonError("Database[" + dbName + "] does not exist");
        }

        List<String> createTableStmt = Lists.newArrayList();
        List<String> addPartitionStmt = Lists.newArrayList();
        List<String> createRollupStmt = Lists.newArrayList();

        // Hold the db read lock while reading table meta; the finally block
        // releases it even on the early "table not found" return.
        db.readLock();
        try {
            Table table = db.getTable(tableName);
            if (table == null) {
                return ResponseEntityBuilder.okWithCommonError("Table[" + tableName + "] does not exist");
            }
            Catalog.getDdlStmt(table, createTableStmt, addPartitionStmt, createRollupStmt, true, false /* show password */);
        } finally {
            db.readUnlock();
        }

        Map<String, List<String>> results = Maps.newHashMap();
        results.put("create_table", createTableStmt);
        results.put("create_partition", addPartitionStmt);
        results.put("create_rollup", createRollupStmt);
        return ResponseEntityBuilder.ok(results);
    }
}

View File

@ -0,0 +1,91 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.MetaNotFoundException;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.load.Load;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.view.RedirectView;
import com.google.common.base.Strings;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
// Get load information of one load job
// Get load information of one load job
@RestController
public class GetLoadInfoAction extends RestBaseController {

    // NOTE(review): Spring controllers are singletons, so this mutable field is
    // shared across all concurrent requests. It is retained only for subclass
    // compatibility; request handling below works on a local reference instead.
    protected Catalog catalog;

    /**
     * Returns the job info of the load job identified by the 'label' query
     * parameter in the given database. Requires LOAD privilege on the database
     * (or on every involved table when the job lists tables).
     */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/_load_info", method = RequestMethod.GET)
    public Object execute(
            @PathVariable(value = DB_KEY) final String dbName,
            HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);

        // Use a per-request local to avoid cross-request interference on the
        // shared instance field; keep the field assignment for compatibility.
        Catalog currentCatalog = Catalog.getCurrentCatalog();
        this.catalog = currentCatalog;

        String fullDbName = getFullDbName(dbName);
        Load.JobInfo info = new Load.JobInfo(fullDbName,
                request.getParameter(LABEL_KEY),
                ConnectContext.get().getClusterName());
        if (Strings.isNullOrEmpty(info.dbName)) {
            return ResponseEntityBuilder.badRequest("No database selected");
        }
        if (Strings.isNullOrEmpty(info.label)) {
            return ResponseEntityBuilder.badRequest("No label selected");
        }
        if (Strings.isNullOrEmpty(info.clusterName)) {
            return ResponseEntityBuilder.badRequest("No cluster selected");
        }

        // only the master FE holds authoritative load job state
        RedirectView redirectView = redirectToMaster(request, response);
        if (redirectView != null) {
            return redirectView;
        }

        try {
            currentCatalog.getLoadInstance().getJobInfo(info);
            if (info.tblNames.isEmpty()) {
                checkDbAuth(ConnectContext.get().getCurrentUserIdentity(), info.dbName, PrivPredicate.LOAD);
            } else {
                for (String tblName : info.tblNames) {
                    checkTblAuth(ConnectContext.get().getCurrentUserIdentity(), info.dbName, tblName,
                            PrivPredicate.LOAD);
                }
            }
        } catch (DdlException | MetaNotFoundException e) {
            // the old load framework does not know this job; fall back to the
            // new load manager before reporting an error
            try {
                currentCatalog.getLoadManager().getLoadJobInfo(info);
            } catch (DdlException e1) {
                return ResponseEntityBuilder.okWithCommonError(e1.getMessage());
            }
        }
        return ResponseEntityBuilder.ok(info);
    }
}

View File

@ -0,0 +1,128 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.common.Config;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.codehaus.jackson.map.ObjectMapper;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/*
* get log file infos:
* curl -I http://fe_host:http_port/api/get_log_file?type=fe.audit.log
* return:
* HTTP/1.1 200 OK
* file_infos: {"fe.audit.log":24759,"fe.audit.log.20190528.1":132934}
* content-type: text/html
* connection: keep-alive
*
* get log file:
* curl -X GET http://fe_host:http_port/api/get_log_file?type=fe.audit.log&file=fe.audit.log.20190528.1
*/
@RestController
public class GetLogFileAction extends RestBaseController {

    // whitelist of log types this endpoint may serve
    private final Set<String> logFileTypes = Sets.newHashSet("fe.audit.log");

    /**
     * HEAD: returns a 'file_infos' response header mapping log file names to sizes.
     * GET: streams the log file named by the 'file' query parameter.
     * Requires ADMIN privilege.
     */
    @RequestMapping(path = "/api/get_log_file", method = {RequestMethod.GET, RequestMethod.HEAD})
    public Object execute(HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN);
        String logType = request.getParameter("type");
        String logFile = request.getParameter("file");

        // check param empty
        if (Strings.isNullOrEmpty(logType)) {
            return ResponseEntityBuilder.badRequest("Miss type parameter");
        }

        // check type valid or not
        if (!logFileTypes.contains(logType)) {
            return ResponseEntityBuilder.badRequest("log type: " + logType + " is invalid!");
        }

        // Security: the file name is user-supplied and is concatenated into a
        // filesystem path below. Reject separators and parent references so a
        // request cannot escape the log directory (path traversal).
        if (!Strings.isNullOrEmpty(logFile)
                && (logFile.contains("..") || logFile.contains("/") || logFile.contains("\\"))) {
            return ResponseEntityBuilder.badRequest("Invalid file parameter: " + logFile);
        }

        String method = request.getMethod();
        if (method.equals(RequestMethod.HEAD.name())) {
            String fileInfos = getFileInfos(logType);
            response.setHeader("file_infos", fileInfos);
            return ResponseEntityBuilder.ok();
        } else if (method.equals(RequestMethod.GET.name())) {
            File log = getLogFile(logType, logFile);
            // NOTE: the original code had a dead 'log != null' check after 'log'
            // had already been dereferenced; getLogFile never returns null.
            if (!log.exists() || !log.isFile()) {
                return ResponseEntityBuilder.okWithCommonError("Log file not exist: " + log.getName());
            }
            try {
                getFile(request, response, log, log.getName());
            } catch (IOException e) {
                return ResponseEntityBuilder.internalError(e.getMessage());
            }
        }
        return ResponseEntityBuilder.ok();
    }

    // Builds a JSON map of {file name -> size in bytes} for the given log type.
    private String getFileInfos(String logType) {
        Map<String, Long> fileInfos = Maps.newTreeMap();
        if (logType.equals("fe.audit.log")) {
            File logDir = new File(Config.audit_log_dir);
            // listFiles() returns null if the directory does not exist or an
            // I/O error occurs; guard against NPE.
            File[] files = logDir.listFiles();
            if (files != null) {
                for (File file : files) {
                    if (file.isFile() && file.getName().startsWith("fe.audit.log")) {
                        fileInfos.put(file.getName(), file.length());
                    }
                }
            }
        }

        String result = "";
        ObjectMapper mapper = new ObjectMapper();
        try {
            result = mapper.writeValueAsString(fileInfos);
        } catch (Exception e) {
            // best effort: an empty string is returned if serialization fails
        }
        return result;
    }

    // Resolves the on-disk file for the requested log type and file name.
    private File getLogFile(String logType, String logFile) {
        String logPath = "";
        if ("fe.audit.log".equals(logType)) {
            logPath = Config.audit_log_dir + "/" + logFile;
        }
        return new File(logPath);
    }
}

View File

@ -0,0 +1,78 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.common.util.SmallFileMgr;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import com.google.common.base.Strings;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@RestController
public class GetSmallFileAction extends RestBaseController {

    private static final Logger LOG = LogManager.getLogger(GetSmallFileAction.class);

    /**
     * Serves the content of a small file stored in the catalog.
     * Auth is done via the cluster token ('token' param) rather than a user
     * password; 'file_id' selects the file.
     */
    @RequestMapping(path = "/api/get_small_file", method = RequestMethod.GET)
    public Object execute(HttpServletRequest request, HttpServletResponse response) {
        final String authToken = request.getParameter("token");
        final String idParam = request.getParameter("file_id");

        // both parameters are mandatory
        if (Strings.isNullOrEmpty(authToken) || Strings.isNullOrEmpty(idParam)) {
            return ResponseEntityBuilder.badRequest("Missing parameter. Need token and file id");
        }

        // the token must match the cluster-wide token
        if (!authToken.equals(Catalog.getCurrentCatalog().getToken())) {
            return ResponseEntityBuilder.okWithCommonError("Invalid token");
        }

        long fileId;
        try {
            fileId = Long.parseLong(idParam);
        } catch (NumberFormatException e) {
            return ResponseEntityBuilder.badRequest("Invalid file id format: " + idParam);
        }

        SmallFileMgr fileMgr = Catalog.getCurrentCatalog().getSmallFileMgr();
        SmallFileMgr.SmallFile smallFile = fileMgr.getSmallFile(fileId);
        if (smallFile == null || !smallFile.isContent) {
            return ResponseEntityBuilder.okWithCommonError("File not found or is not content");
        }

        if (request.getMethod().equalsIgnoreCase("GET")) {
            try {
                getFile(request, response, smallFile.getContentBytes(), smallFile.name);
            } catch (IOException e) {
                return ResponseEntityBuilder.internalError(e.getMessage());
            }
        }
        return ResponseEntityBuilder.ok();
    }
}

View File

@ -0,0 +1,63 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Database;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import com.google.common.base.Strings;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.view.RedirectView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@RestController
public class GetStreamLoadState extends RestBaseController {

    /**
     * Returns the transaction state (as a string) of the load job identified
     * by the 'label' query parameter within the given database.
     */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/get_load_state", method = RequestMethod.GET)
    public Object execute(@PathVariable(value = DB_KEY) final String dbName,
                          HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);

        // transaction state lives on the master FE
        RedirectView toMaster = redirectToMaster(request, response);
        if (toMaster != null) {
            return toMaster;
        }

        final String jobLabel = request.getParameter(LABEL_KEY);
        if (Strings.isNullOrEmpty(jobLabel)) {
            return ResponseEntityBuilder.badRequest("No label selected");
        }

        Database database = Catalog.getCurrentCatalog().getDb(getFullDbName(dbName));
        if (database == null) {
            return ResponseEntityBuilder.okWithCommonError("unknown database, database=" + dbName);
        }

        String txnState = Catalog.getCurrentGlobalTransactionMgr()
                .getLabelState(database.getId(), jobLabel).toString();
        return ResponseEntityBuilder.ok(txnState);
    }
}

View File

@ -0,0 +1,40 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.Map;
@RestController
public class HealthAction extends RestBaseController {

    /**
     * Reports cluster health as the total number of registered backends and
     * the number of backends currently alive.
     */
    @RequestMapping(path = "/api/health", method = RequestMethod.GET)
    public Object execute() {
        Map<String, Object> info = new HashMap<>();
        // getBackendIds(false) = all backends, getBackendIds(true) = alive only
        info.put("total_backend_num", Catalog.getCurrentSystemInfo().getBackendIds(false).size());
        info.put("online_backend_num", Catalog.getCurrentSystemInfo().getBackendIds(true).size());
        return ResponseEntityBuilder.ok(info);
    }
}

View File

@ -0,0 +1,153 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import io.netty.handler.codec.http.HttpHeaderNames;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.cluster.ClusterNamespace;
import org.apache.doris.common.DdlException;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.service.ExecuteEnv;
import org.apache.doris.system.Backend;
import org.apache.doris.thrift.TNetworkAddress;
import com.google.common.base.Strings;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.view.RedirectView;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@RestController
public class LoadAction extends RestBaseController {
    private static final Logger LOG = LogManager.getLogger(LoadAction.class);

    public static final String SUB_LABEL_NAME_PARAM = "sub_label";

    // ExecuteEnv is a process-wide singleton, safe to cache once.
    private final ExecuteEnv execEnv = ExecuteEnv.getInstance();

    // BUG FIX: the original stored 'isStreamLoad' in a mutable instance field.
    // Spring controllers are singletons, so concurrent /_load and /_stream_load
    // requests raced on that field and could misroute each other. The flag is
    // now passed explicitly as a parameter.

    /** Mini/hadoop load entry: redirects the PUT to a backend (or the master for multi load). */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/{" + TABLE_KEY + "}/_load", method = RequestMethod.PUT)
    public Object load(HttpServletRequest request, HttpServletResponse response,
                       @PathVariable(value = DB_KEY) String db, @PathVariable(value = TABLE_KEY) String table)
            throws DdlException {
        executeCheckPassword(request, response);
        return executeWithoutPassword(request, response, db, table, false /* not stream load */);
    }

    /** Stream load entry: redirects the PUT to a live backend. */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/{" + TABLE_KEY + "}/_stream_load", method = RequestMethod.PUT)
    public Object streamLoad(HttpServletRequest request,
                             HttpServletResponse response,
                             @PathVariable(value = DB_KEY) String db, @PathVariable(value = TABLE_KEY) String table) {
        executeCheckPassword(request, response);
        return executeWithoutPassword(request, response, db, table, true /* stream load */);
    }

    /**
     * Shared handling after password check: validates cluster/db/table/label,
     * checks LOAD privilege, then builds a redirect to the chosen backend.
     */
    private Object executeWithoutPassword(HttpServletRequest request, HttpServletResponse response,
                                          String dbName, String tableName, boolean isStreamLoad) {
        // A 'Load' request must have 100-continue header
        if (request.getHeader(HttpHeaderNames.EXPECT.toString()) == null) {
            return ResponseEntityBuilder.notFound("There is no 100-continue header");
        }

        final String clusterName = ConnectContext.get().getClusterName();
        if (Strings.isNullOrEmpty(clusterName)) {
            return ResponseEntityBuilder.badRequest("No cluster selected.");
        }

        if (Strings.isNullOrEmpty(dbName)) {
            return ResponseEntityBuilder.badRequest("No database selected.");
        }

        if (Strings.isNullOrEmpty(tableName)) {
            return ResponseEntityBuilder.badRequest("No table selected.");
        }

        String fullDbName = ClusterNamespace.getFullName(clusterName, dbName);

        // stream load carries the label in a header; mini load in a query parameter
        String label = isStreamLoad ? request.getHeader(LABEL_KEY) : request.getParameter(LABEL_KEY);
        if (!isStreamLoad && Strings.isNullOrEmpty(label)) {
            // for stream load, the label can be generated by system automatically
            return ResponseEntityBuilder.badRequest("No label selected.");
        }

        // check auth
        checkTblAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, tableName, PrivPredicate.LOAD);

        if (!isStreamLoad && !Strings.isNullOrEmpty(request.getParameter(SUB_LABEL_NAME_PARAM))) {
            // only multi mini load need to redirect to Master, because only Master has the info of table to
            // the Backend which the file exists.
            RedirectView redirectView = redirectToMaster(request, response);
            if (redirectView != null) {
                return redirectView;
            }
        }

        // Choose a backend sequentially.
        List<Long> backendIds = Catalog.getCurrentSystemInfo().seqChooseBackendIds(1, true, false, clusterName);
        if (backendIds == null) {
            return ResponseEntityBuilder.okWithCommonError("No backend alive.");
        }

        Backend backend = Catalog.getCurrentSystemInfo().getBackend(backendIds.get(0));
        if (backend == null) {
            return ResponseEntityBuilder.okWithCommonError("No backend alive.");
        }

        TNetworkAddress redirectAddr = new TNetworkAddress(backend.getHost(), backend.getHttpPort());
        if (!isStreamLoad) {
            String subLabel = request.getParameter(SUB_LABEL_NAME_PARAM);
            if (!Strings.isNullOrEmpty(subLabel)) {
                // multi mini load: all sub loads of one label must go to the same backend
                try {
                    redirectAddr = execEnv.getMultiLoadMgr().redirectAddr(fullDbName, label, tableName, redirectAddr);
                } catch (DdlException e) {
                    return ResponseEntityBuilder.okWithCommonError(e.getMessage());
                }
            }
        }

        LOG.info("redirect load action to destination={}, stream: {}, db: {}, tbl: {}, label: {}",
                redirectAddr.toString(), isStreamLoad, dbName, tableName, label);
        RedirectView redirectView = redirectTo(request, redirectAddr);
        return redirectView;
    }
}

View File

@ -0,0 +1,333 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Database;
import org.apache.doris.catalog.OlapTable;
import org.apache.doris.catalog.Table;
import org.apache.doris.cluster.ClusterNamespace;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.FeConstants;
import org.apache.doris.common.Pair;
import org.apache.doris.common.UserException;
import org.apache.doris.common.proc.ProcNodeInterface;
import org.apache.doris.common.proc.ProcResult;
import org.apache.doris.common.proc.ProcService;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.httpv2.exception.BadRequestException;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.system.SystemInfoService;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* And meta info like databases, tables and schema
*/
@RestController
public class MetaInfoAction extends RestBaseController {

    private static final String NAMESPACES = "namespaces";
    private static final String DATABASES = "databases";
    private static final String TABLES = "tables";
    private static final String PARAM_LIMIT = "limit";
    private static final String PARAM_OFFSET = "offset";
    private static final String PARAM_WITH_MV = "with_mv";

    /**
     * Get all databases visible to the current user (SHOW privilege required
     * per database). Supports 'limit'/'offset' paging.
     * {
     *   "msg": "success",
     *   "code": 0,
     *   "data": [
     *     "default_cluster:db1",
     *     "default_cluster:doris_audit_db__",
     *     "default_cluster:information_schema"
     *   ],
     *   "count": 0
     * }
     */
    @RequestMapping(path = "/api/meta/" + NAMESPACES + "/{" + NS_KEY + "}/" + DATABASES,
            method = {RequestMethod.GET})
    public Object getAllDatabases(
            @PathVariable(value = NS_KEY) String ns,
            HttpServletRequest request, HttpServletResponse response) {
        checkWithCookie(request, response, false);

        if (!ns.equalsIgnoreCase(SystemInfoService.DEFAULT_CLUSTER)) {
            return ResponseEntityBuilder.badRequest("Only support 'default_cluster' now");
        }

        // 1. get all database with priviledge
        List<String> dbNames = null;
        try {
            dbNames = Catalog.getCurrentCatalog().getClusterDbNames(ns);
        } catch (AnalysisException e) {
            return ResponseEntityBuilder.okWithCommonError("namespace does not exist: " + ns);
        }

        // BUG FIX: the original built a privilege-filtered list but then sorted
        // and returned the UNFILTERED 'dbNames', leaking databases the user has
        // no SHOW privilege on. Filter, sort and page the filtered list instead.
        List<String> filteredDbNames = Lists.newArrayList();
        for (String fullName : dbNames) {
            if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), fullName,
                    PrivPredicate.SHOW)) {
                continue;
            }
            filteredDbNames.add(fullName);
        }
        Collections.sort(filteredDbNames);

        // handle limit offset
        Pair<Integer, Integer> fromToIndex = getFromToIndex(request, filteredDbNames.size());
        return ResponseEntityBuilder.ok(filteredDbNames.subList(fromToIndex.first, fromToIndex.second));
    }

    /**
     * Get all tables of a database the user may SHOW. Supports 'limit'/'offset'.
     * {
     *   "msg": "success",
     *   "code": 0,
     *   "data": [
     *     "tbl1",
     *     "tbl2"
     *   ],
     *   "count": 0
     * }
     */
    @RequestMapping(path = "/api/meta/" + NAMESPACES + "/{" + NS_KEY + "}/" + DATABASES + "/{" + DB_KEY + "}/" + TABLES,
            method = {RequestMethod.GET})
    public Object getTables(
            @PathVariable(value = NS_KEY) String ns, @PathVariable(value = DB_KEY) String dbName,
            HttpServletRequest request, HttpServletResponse response) {
        checkWithCookie(request, response, false);

        if (!ns.equalsIgnoreCase(SystemInfoService.DEFAULT_CLUSTER)) {
            return ResponseEntityBuilder.badRequest("Only support 'default_cluster' now");
        }

        String fullDbName = getFullDbName(dbName);
        Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
        if (db == null) {
            return ResponseEntityBuilder.okWithCommonError("Database does not exist: " + fullDbName);
        }

        List<String> tblNames = Lists.newArrayList();
        db.readLock();
        try {
            for (Table tbl : db.getTables()) {
                // only list tables the user has SHOW privilege on
                if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), fullDbName, tbl.getName(),
                        PrivPredicate.SHOW)) {
                    continue;
                }
                tblNames.add(tbl.getName());
            }
        } finally {
            db.readUnlock();
        }
        Collections.sort(tblNames);

        // handle limit offset
        Pair<Integer, Integer> fromToIndex = getFromToIndex(request, tblNames.size());
        return ResponseEntityBuilder.ok(tblNames.subList(fromToIndex.first, fromToIndex.second));
    }

    /**
     * Get schema of a table (and optionally its materialized views/rollups when
     * with_mv=1). Requires SHOW privilege on the table.
     * {
     *   "msg": "success",
     *   "code": 0,
     *   "data": {
     *     "tbl1": { "schema": [ {"Field": "k1", "Type": "INT", ...} ], "is_base": true },
     *     "r1":   { "schema": [ {"Field": "k1", "Type": "INT", ...} ], "is_base": false }
     *   },
     *   "count": 0
     * }
     */
    @RequestMapping(path = "/api/meta/" + NAMESPACES + "/{" + NS_KEY + "}/" + DATABASES + "/{" + DB_KEY + "}/" + TABLES
            + "/{" + TABLE_KEY + "}/schema",
            method = {RequestMethod.GET})
    public Object getTableSchema(
            @PathVariable(value = NS_KEY) String ns, @PathVariable(value = DB_KEY) String dbName,
            @PathVariable(value = TABLE_KEY) String tblName,
            HttpServletRequest request, HttpServletResponse response) throws UserException {
        checkWithCookie(request, response, false);

        if (!ns.equalsIgnoreCase(SystemInfoService.DEFAULT_CLUSTER)) {
            return ResponseEntityBuilder.badRequest("Only support 'default_cluster' now");
        }

        String fullDbName = getFullDbName(dbName);
        checkTblAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, tblName, PrivPredicate.SHOW);
        Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
        if (db == null) {
            return ResponseEntityBuilder.okWithCommonError("Database does not exist: " + fullDbName);
        }

        String withMvPara = request.getParameter(PARAM_WITH_MV);
        boolean withMv = Strings.isNullOrEmpty(withMvPara) ? false : withMvPara.equals("1");

        // index name -> {"schema": [...], "is_base": bool}
        Map<String, Map<String, Object>> result = Maps.newHashMap();
        db.readLock();
        try {
            Table tbl = db.getTable(tblName);
            if (tbl == null) {
                return ResponseEntityBuilder.okWithCommonError("Table does not exist: " + tblName);
            }

            long baseId = -1;
            if (tbl.getType() == Table.TableType.OLAP) {
                baseId = ((OlapTable) tbl).getBaseIndexId();
            } else {
                // non-OLAP tables have no base index; derive an id from the
                // table id (NOTE(review): -1 + tableId looks odd — confirm the
                // proc node accepts this for non-OLAP tables)
                baseId += tbl.getId();
            }
            String procPath = Joiner.on("/").join("", "dbs", db.getId(), tbl.getId(), "index_schema/", baseId);
            generateResult(tblName, true, procPath, result);

            if (withMv && tbl.getType() == Table.TableType.OLAP) {
                OlapTable olapTable = (OlapTable) tbl;
                for (long indexId : olapTable.getIndexIdListExceptBaseIndex()) {
                    procPath = Joiner.on("/").join("", "dbs", db.getId(), tbl.getId(), "index_schema/", indexId);
                    generateResult(olapTable.getIndexNameById(indexId), false, procPath, result);
                }
            }
        } finally {
            db.readUnlock();
        }
        return ResponseEntityBuilder.ok(result);
    }

    // Fills result[indexName] with the schema fetched from procPath and the
    // base-index flag.
    private void generateResult(String indexName, boolean isBaseIndex, String procPath,
                                Map<String, Map<String, Object>> result) throws UserException {
        Map<String, Object> propMap = result.get(indexName);
        if (propMap == null) {
            propMap = Maps.newHashMap();
            result.put(indexName, propMap);
        }
        propMap.put("is_base", isBaseIndex);
        propMap.put("schema", generateSchema(procPath));
    }

    // Opens the proc node at procPath and converts each result row into a
    // {column name -> value} map.
    List<Map<String, String>> generateSchema(String procPath) throws UserException {
        ProcNodeInterface node = ProcService.getInstance().open(procPath);
        if (node == null) {
            throw new DdlException("get schema with proc path failed: " + procPath);
        }

        List<Map<String, String>> schema = Lists.newArrayList();
        ProcResult procResult = node.fetchResult();
        List<String> colNames = procResult.getColumnNames();
        List<List<String>> rows = procResult.getRows();
        for (List<String> row : rows) {
            Preconditions.checkState(row.size() == colNames.size());
            Map<String, String> fieldMap = Maps.newHashMap();
            for (int i = 0; i < row.size(); i++) {
                fieldMap.put(colNames.get(i), convertIfNull(row.get(i)));
            }
            schema.add(fieldMap);
        }
        return schema;
    }

    // Maps the proc layer's NULL sentinel string to a real null.
    // (constant-first equals also tolerates a null value instead of throwing NPE)
    private String convertIfNull(String val) {
        return FeConstants.null_string.equals(val) ? null : val;
    }

    // get limit and offset from query parameter
    // and return fromIndex and toIndex of a list
    private Pair<Integer, Integer> getFromToIndex(HttpServletRequest request, int maxNum) {
        String limitStr = request.getParameter(PARAM_LIMIT);
        String offsetStr = request.getParameter(PARAM_OFFSET);

        int offset = 0;
        int limit = Integer.MAX_VALUE;
        if (Strings.isNullOrEmpty(limitStr)) {
            // limit not set
            if (!Strings.isNullOrEmpty(offsetStr)) {
                throw new BadRequestException("Param offset should be set with param limit");
            }
        } else {
            // limit is set
            limit = Integer.valueOf(limitStr);
            if (limit < 0) {
                throw new BadRequestException("Param limit should >= 0");
            }

            offset = 0;
            if (!Strings.isNullOrEmpty(offsetStr)) {
                offset = Integer.valueOf(offsetStr);
                if (offset < 0) {
                    throw new BadRequestException("Param offset should >= 0");
                }
            }
        }

        if (maxNum <= 0) {
            return Pair.create(0, 0);
        }
        // compute 'limit + offset' in long to avoid int overflow for huge limits
        return Pair.create(Math.min(offset, maxNum - 1), (int) Math.min((long) limit + offset, maxNum));
    }
}

View File

@ -0,0 +1,51 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/*
* used to get meta replay info
* eg:
* fe_host:http_port/api/_meta_replay_state
*/
@RestController
public class MetaReplayerCheckAction extends RestBaseController {

    /**
     * Returns the current metadata replay state of this FE node.
     * Requires an authenticated user holding global ADMIN privilege.
     */
    @RequestMapping(path = "/api/_meta_replay_state", method = RequestMethod.GET)
    public Object execute(HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN);

        Map<String, String> replayState = Catalog.getCurrentCatalog().getMetaReplayState().getInfo();
        return ResponseEntityBuilder.ok(replayState);
    }
}

View File

@ -0,0 +1,62 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.metric.JsonMetricVisitor;
import org.apache.doris.metric.MetricRepo;
import org.apache.doris.metric.MetricVisitor;
import org.apache.doris.metric.PrometheusMetricVisitor;
import org.apache.doris.metric.SimpleCoreMetricVisitor;
import com.google.common.base.Strings;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
//fehost:port/metrics
//fehost:port/metrics?type=core
@RestController
public class MetricsAction {

    private static final String TYPE_PARAM = "type";

    /**
     * Dumps FE metrics as plain text.
     * type=core  -> simple core metrics
     * type=agent -> JSON metrics
     * otherwise  -> Prometheus exposition format (default)
     */
    @RequestMapping(path = "/metrics")
    public void execute(HttpServletRequest request, HttpServletResponse response) {
        String type = request.getParameter(TYPE_PARAM);
        // constant-first equalsIgnoreCase is null-safe, so no emptiness check needed
        MetricVisitor visitor;
        if ("core".equalsIgnoreCase(type)) {
            visitor = new SimpleCoreMetricVisitor("doris_fe");
        } else if ("agent".equalsIgnoreCase(type)) {
            visitor = new JsonMetricVisitor("doris_fe");
        } else {
            visitor = new PrometheusMetricVisitor("doris_fe");
        }

        response.setContentType("text/plain");
        try {
            response.getWriter().write(MetricRepo.getMetric(visitor));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

View File

@ -0,0 +1,223 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.analysis.LoadStmt;
import org.apache.doris.common.DdlException;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.service.ExecuteEnv;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.view.RedirectView;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
// List all labels of one multi-load
/**
 * Endpoints for managing one multi-load: start / desc / list / unload /
 * commit / abort. All state lives on the Master FE, so every handler
 * redirects non-Master requests there first.
 *
 * Fix: the original cached ExecuteEnv in a shared, mutable instance field of
 * this singleton controller (written on every request by every handler);
 * each handler now obtains it locally at the point of use.
 */
@RestController
public class MultiAction extends RestBaseController {

    private static final String SUB_LABEL_KEY = "sub_label";

    /** Lists all sub-labels that belong to the given multi-load label. */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/_multi_desc", method = RequestMethod.POST)
    public Object multi_desc(
            @PathVariable(value = DB_KEY) final String dbName,
            HttpServletRequest request, HttpServletResponse response)
            throws DdlException {
        executeCheckPassword(request, response);
        String label = request.getParameter(LABEL_KEY);
        if (Strings.isNullOrEmpty(label)) {
            return ResponseEntityBuilder.badRequest("No label selected");
        }
        String fullDbName = getFullDbName(dbName);
        checkDbAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, PrivPredicate.LOAD);
        // only Master has these load info
        RedirectView redirectView = redirectToMaster(request, response);
        if (redirectView != null) {
            return redirectView;
        }
        final List<String> labels = Lists.newArrayList();
        ExecuteEnv.getInstance().getMultiLoadMgr().desc(fullDbName, label, labels);
        return ResponseEntityBuilder.ok(labels);
    }

    /** Lists all multi-load labels of the given database. */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/_multi_list", method = RequestMethod.POST)
    public Object multi_list(
            @PathVariable(value = DB_KEY) final String dbName,
            HttpServletRequest request, HttpServletResponse response)
            throws DdlException {
        executeCheckPassword(request, response);
        String fullDbName = getFullDbName(dbName);
        checkDbAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, PrivPredicate.LOAD);
        // only Master has these load info
        RedirectView redirectView = redirectToMaster(request, response);
        if (redirectView != null) {
            return redirectView;
        }
        final List<String> labels = Lists.newArrayList();
        ExecuteEnv.getInstance().getMultiLoadMgr().list(fullDbName, labels);
        return ResponseEntityBuilder.ok(labels);
    }

    /** Starts a new multi-load under the given label, with optional timeout / max_filter_ratio. */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/_multi_start", method = RequestMethod.POST)
    public Object multi_start(
            @PathVariable(value = DB_KEY) final String dbName,
            HttpServletRequest request, HttpServletResponse response)
            throws DdlException {
        executeCheckPassword(request, response);
        String label = request.getParameter(LABEL_KEY);
        if (Strings.isNullOrEmpty(label)) {
            return ResponseEntityBuilder.badRequest("No label selected");
        }
        String fullDbName = getFullDbName(dbName);
        checkDbAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, PrivPredicate.LOAD);
        // Multi start request must redirect to master, because all following sub requests will be handled
        // on Master
        RedirectView redirectView = redirectToMaster(request, response);
        if (redirectView != null) {
            return redirectView;
        }
        // pass through only the load properties the multi-load manager understands
        Map<String, String> properties = Maps.newHashMap();
        String[] keys = {LoadStmt.TIMEOUT_PROPERTY, LoadStmt.MAX_FILTER_RATIO_PROPERTY};
        for (String key : keys) {
            String value = request.getParameter(key);
            if (!Strings.isNullOrEmpty(value)) {
                properties.put(key, value);
            }
        }
        ExecuteEnv.getInstance().getMultiLoadMgr().startMulti(fullDbName, label, properties);
        return ResponseEntityBuilder.ok();
    }

    /** Removes one sub-label from an in-progress multi-load. */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/_multi_unload", method = RequestMethod.POST)
    public Object multi_unload(
            @PathVariable(value = DB_KEY) final String dbName,
            HttpServletRequest request, HttpServletResponse response)
            throws DdlException {
        executeCheckPassword(request, response);
        String label = request.getParameter(LABEL_KEY);
        if (Strings.isNullOrEmpty(label)) {
            return ResponseEntityBuilder.badRequest("No label selected");
        }
        String subLabel = request.getParameter(SUB_LABEL_KEY);
        if (Strings.isNullOrEmpty(subLabel)) {
            return ResponseEntityBuilder.badRequest("No sub label selected");
        }
        String fullDbName = getFullDbName(dbName);
        checkDbAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, PrivPredicate.LOAD);
        RedirectView redirectView = redirectToMaster(request, response);
        if (redirectView != null) {
            return redirectView;
        }
        ExecuteEnv.getInstance().getMultiLoadMgr().unload(fullDbName, label, subLabel);
        return ResponseEntityBuilder.ok();
    }

    /** Commits a finished multi-load, making all its sub-loads visible. */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/_multi_commit", method = RequestMethod.POST)
    public Object multi_commit(
            @PathVariable(value = DB_KEY) final String dbName,
            HttpServletRequest request, HttpServletResponse response)
            throws DdlException {
        executeCheckPassword(request, response);
        String label = request.getParameter(LABEL_KEY);
        if (Strings.isNullOrEmpty(label)) {
            return ResponseEntityBuilder.badRequest("No label selected");
        }
        String fullDbName = getFullDbName(dbName);
        checkDbAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, PrivPredicate.LOAD);
        // only Master has these load info
        RedirectView redirectView = redirectToMaster(request, response);
        if (redirectView != null) {
            return redirectView;
        }
        ExecuteEnv.getInstance().getMultiLoadMgr().commit(fullDbName, label);
        return ResponseEntityBuilder.ok();
    }

    /** Aborts an in-progress multi-load, discarding all its sub-loads. */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/_multi_abort", method = RequestMethod.POST)
    public Object multi_abort(
            @PathVariable(value = DB_KEY) final String dbName,
            HttpServletRequest request, HttpServletResponse response)
            throws DdlException {
        executeCheckPassword(request, response);
        String label = request.getParameter(LABEL_KEY);
        if (Strings.isNullOrEmpty(label)) {
            return ResponseEntityBuilder.badRequest("No label selected");
        }
        String fullDbName = getFullDbName(dbName);
        checkDbAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, PrivPredicate.LOAD);
        // only Master has these load info
        RedirectView redirectView = redirectToMaster(request, response);
        if (redirectView != null) {
            return redirectView;
        }
        ExecuteEnv.getInstance().getMultiLoadMgr().abort(fullDbName, label);
        return ResponseEntityBuilder.ok();
    }
}

View File

@ -0,0 +1,66 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.common.util.ProfileManager;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
// This class is a RESTFUL interface to get query profile.
// It will be used in query monitor to collect profiles.
// Usage:
// wget http://fe_host:fe_http_port/api/profile?query_id=123456
@RestController
public class ProfileAction extends RestBaseController {
    private static final Logger LOG = LogManager.getLogger(ProfileAction.class);

    /**
     * Returns the runtime profile of a query by its query id.
     * Usage: GET /api/profile?query_id=xxx (requires global ADMIN privilege).
     */
    @RequestMapping(path = "/api/profile", method = RequestMethod.GET)
    protected Object profile(HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN);

        String queryId = request.getParameter("query_id");
        if (Strings.isNullOrEmpty(queryId)) {
            return ResponseEntityBuilder.badRequest("Missing query_id");
        }

        String profileText = ProfileManager.getInstance().getProfile(queryId);
        if (profileText == null) {
            return ResponseEntityBuilder.okWithCommonError("query id " + queryId + " not found.");
        }

        Map<String, String> body = Maps.newHashMap();
        body.put("profile", profileText);
        return ResponseEntityBuilder.ok(body);
    }
}

View File

@ -0,0 +1,66 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.qe.QueryDetail;
import org.apache.doris.qe.QueryDetailQueue;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
// This class is used to get current query_id of connection_id.
// Every connection holds at most one query at every point.
// Some we can get query_id firstly, and get query by query_id.
@RestController
public class QueryDetailAction extends RestBaseController {
    private static final Logger LOG = LogManager.getLogger(QueryDetailAction.class);

    /**
     * Returns query details recorded after the given event time.
     * Usage: GET /api/query_detail?event_time=millis (requires ADMIN privilege).
     */
    @RequestMapping(path = "/api/query_detail", method = RequestMethod.GET)
    protected Object query_detail(HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN);

        String eventTimeStr = request.getParameter("event_time");
        if (Strings.isNullOrEmpty(eventTimeStr)) {
            return ResponseEntityBuilder.badRequest("Missing event_time");
        }
        long eventTime;
        try {
            eventTime = Long.parseLong(eventTimeStr.trim());
        } catch (NumberFormatException e) {
            // malformed input is a client error, not an uncaught 500
            return ResponseEntityBuilder.badRequest("Invalid event_time: " + eventTimeStr);
        }

        List<QueryDetail> queryDetails = QueryDetailQueue.getQueryDetails(eventTime);
        Map<String, List<QueryDetail>> result = Maps.newHashMap();
        result.put("query_details", queryDetails);
        return ResponseEntityBuilder.ok(result);
    }
}

View File

@ -0,0 +1,29 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
/**
 * Numeric status codes carried in REST API response bodies.
 */
public enum RestApiStatusCode {
    OK(0),
    COMMON_ERROR(1);

    // kept public for backward compatibility with existing readers,
    // but now final so callers can no longer mutate an enum constant's code
    public final int code;

    RestApiStatusCode(int code) {
        this.code = code;
    }

    /** Returns the numeric code of this status. */
    public int getCode() {
        return code;
    }
}

View File

@ -0,0 +1,157 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.analysis.UserIdentity;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.cluster.ClusterNamespace;
import org.apache.doris.httpv2.controller.BaseController;
import org.apache.doris.httpv2.exception.UnauthorizedException;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.system.SystemInfoService;
import org.apache.doris.thrift.TNetworkAddress;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.servlet.view.RedirectView;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
/**
 * Common plumbing for httpv2 REST controllers: authentication, redirect to
 * Master, file/byte[] download helpers and database-name normalization.
 */
public class RestBaseController extends BaseController {

    protected static final String NS_KEY = "ns";
    protected static final String DB_KEY = "db";
    protected static final String TABLE_KEY = "table";
    protected static final String LABEL_KEY = "label";
    private static final Logger LOG = LogManager.getLogger(RestBaseController.class);

    /**
     * Authenticates the request's credentials and binds a fresh ConnectContext
     * (catalog, user, remote IP, cluster) to the current thread.
     *
     * @return the parsed authorization info of the request
     * @throws UnauthorizedException if credentials are missing or wrong
     */
    public ActionAuthorizationInfo executeCheckPassword(HttpServletRequest request,
                                                        HttpServletResponse response) throws UnauthorizedException {
        ActionAuthorizationInfo authInfo = getAuthorizationInfo(request);
        // check password
        UserIdentity currentUser = checkPassword(authInfo);
        ConnectContext ctx = new ConnectContext(null);
        ctx.setCatalog(Catalog.getCurrentCatalog());
        ctx.setQualifiedUser(authInfo.fullUserName);
        ctx.setRemoteIP(authInfo.remoteIp);
        ctx.setCurrentUserIdentity(currentUser);
        ctx.setCluster(authInfo.cluster);
        ctx.setThreadLocalInfo();
        return authInfo;
    }

    /**
     * Builds a 307 redirect of the current request to the given FE address,
     * keeping the request path and query string.
     */
    public RedirectView redirectTo(HttpServletRequest request, TNetworkAddress addr) {
        URI urlObj = null;
        URI resultUriObj = null;
        String urlStr = request.getRequestURI();
        try {
            urlObj = new URI(urlStr);
            // NOTE(review): the empty-string query appears intended to keep a
            // trailing '?' so the raw query string can be appended verbatim
            // below — confirm against URI.toASCIIString behavior
            resultUriObj = new URI("http", null, addr.getHostname(),
                    addr.getPort(), urlObj.getPath(), "", null);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        String redirectUrl = resultUriObj.toASCIIString();
        if (!Strings.isNullOrEmpty(request.getQueryString())) {
            redirectUrl += request.getQueryString();
        }
        LOG.info("redirect url: {}", redirectUrl);
        RedirectView redirectView = new RedirectView(redirectUrl);
        redirectView.setContentType("text/html;charset=utf-8");
        redirectView.setStatusCode(org.springframework.http.HttpStatus.TEMPORARY_REDIRECT);
        return redirectView;
    }

    /**
     * Redirects to the Master FE, or returns null when this node is already
     * Master and can serve the request itself.
     */
    public RedirectView redirectToMaster(HttpServletRequest request, HttpServletResponse response) {
        Catalog catalog = Catalog.getCurrentCatalog();
        if (catalog.isMaster()) {
            return null;
        }
        return redirectTo(request, new TNetworkAddress(catalog.getMasterIp(), catalog.getMasterHttpPort()));
    }

    /**
     * Streams a File or byte[] payload to the client as an attachment.
     * The input streams are now closed via try-with-resources; the original
     * hand-rolled nested close with printStackTrace is gone.
     */
    public void getFile(HttpServletRequest request, HttpServletResponse response, Object obj, String fileName)
            throws IOException {
        response.setHeader("Content-type", "application/octet-stream");
        response.addHeader("Content-Disposition", "attachment;fileName=" + fileName); // set file name
        if (obj instanceof File) {
            File file = (File) obj;
            try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file))) {
                OutputStream os = response.getOutputStream();
                byte[] buffer = new byte[1024];
                int bytesRead;
                while ((bytesRead = bis.read(buffer)) != -1) {
                    os.write(buffer, 0, bytesRead);
                }
            }
        } else if (obj instanceof byte[]) {
            OutputStream os = response.getOutputStream();
            os.write((byte[]) obj);
        }
    }

    /**
     * Sends an image file as an attachment; its size is exposed via the
     * X-Image-Size response header.
     */
    public void writeFileResponse(HttpServletRequest request, HttpServletResponse response, File imageFile) throws IOException {
        Preconditions.checkArgument(imageFile != null && imageFile.exists());
        response.setHeader("Content-type", "application/octet-stream");
        response.addHeader("Content-Disposition", "attachment;fileName=" + imageFile.getName());
        response.setHeader("X-Image-Size", imageFile.length() + "");
        getFile(request, response, imageFile, imageFile.getName());
    }

    /**
     * Prefixes the default cluster name when dbName carries no cluster part.
     */
    public String getFullDbName(String dbName) {
        String fullDbName = dbName;
        String clusterName = ClusterNamespace.getClusterNameFromFullName(fullDbName);
        if (clusterName == null) {
            fullDbName = ClusterNamespace.getFullName(SystemInfoService.DEFAULT_CLUSTER, dbName);
        }
        return fullDbName;
    }
}

View File

@ -0,0 +1,110 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Database;
import org.apache.doris.catalog.MaterializedIndex;
import org.apache.doris.catalog.MaterializedIndex.IndexExtState;
import org.apache.doris.catalog.OlapTable;
import org.apache.doris.catalog.Partition;
import org.apache.doris.catalog.Replica;
import org.apache.doris.catalog.Table;
import org.apache.doris.catalog.Table.TableType;
import org.apache.doris.catalog.Tablet;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.Map;
/*
* calc row count from replica to table
* fe_host:fe_http_port/api/rowcount?db=dbname&table=tablename
*/
// Recalculates per-index row counts of one OLAP table and returns them.
// GET /api/rowcount?db=dbname&table=tablename (requires global ADMIN privilege)
@RestController
public class RowCountAction extends RestBaseController {
    @RequestMapping(path = "/api/rowcount", method = RequestMethod.GET)
    protected Object rowcount(HttpServletRequest request, HttpServletResponse response) {
        // authenticate and require global ADMIN
        executeCheckPassword(request, response);
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN);
        String dbName = request.getParameter(DB_KEY);
        if (Strings.isNullOrEmpty(dbName)) {
            return ResponseEntityBuilder.badRequest("No database selected");
        }
        String tableName = request.getParameter(TABLE_KEY);
        if (Strings.isNullOrEmpty(tableName)) {
            return ResponseEntityBuilder.badRequest("No table selected");
        }
        String fullDbName = getFullDbName(dbName);
        // index name -> recalculated row count; returned as the response body
        Map<String, Long> indexRowCountMap = Maps.newHashMap();
        Catalog catalog = Catalog.getCurrentCatalog();
        Database db = catalog.getDb(fullDbName);
        if (db == null) {
            return ResponseEntityBuilder.okWithCommonError("Database[" + fullDbName + "] does not exist");
        }
        // write lock is needed: index.setRowCount() below mutates catalog state
        db.writeLock();
        try {
            Table table = db.getTable(tableName);
            if (table == null) {
                return ResponseEntityBuilder.okWithCommonError("Table[" + tableName + "] does not exist");
            }
            if (table.getType() != TableType.OLAP) {
                return ResponseEntityBuilder.okWithCommonError("Table[" + tableName + "] is not OLAP table");
            }
            OlapTable olapTable = (OlapTable) table;
            for (Partition partition : olapTable.getAllPartitions()) {
                long version = partition.getVisibleVersion();
                long versionHash = partition.getVisibleVersionHash();
                for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.VISIBLE)) {
                    long indexRowCount = 0L;
                    for (Tablet tablet : index.getTablets()) {
                        long tabletRowCount = 0L;
                        // a tablet's row count is the max among replicas that
                        // have caught up to the partition's visible version
                        for (Replica replica : tablet.getReplicas()) {
                            if (replica.checkVersionCatchUp(version, versionHash, false)
                                    && replica.getRowCount() > tabletRowCount) {
                                tabletRowCount = replica.getRowCount();
                            }
                        }
                        indexRowCount += tabletRowCount;
                    } // end for tablets
                    // persist the recalculated count back into the catalog
                    index.setRowCount(indexRowCount);
                    indexRowCountMap.put(olapTable.getIndexNameById(index.getId()), indexRowCount);
                } // end for indices
            } // end for partitions
        } finally {
            db.writeUnlock();
        }
        return ResponseEntityBuilder.ok(indexRowCountMap);
    }
}

View File

@ -0,0 +1,107 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.common.ConfigBase;
import org.apache.doris.common.ConfigBase.ConfField;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.google.common.collect.Maps;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.lang.reflect.Field;
import java.util.Map;
/*
* used to set fe config
* eg:
* fe_host:http_port/api/_set_config?config_key1=config_value1&config_key2=config_value2&...
*/
@RestController
public class SetConfigAction extends RestBaseController {
    private static final Logger LOG = LogManager.getLogger(SetConfigAction.class);

    /**
     * Applies mutable FE config values passed as query parameters and reports
     * back which keys were applied ("set") and which were rejected ("err").
     */
    @RequestMapping(path = "/api/_set_config", method = RequestMethod.GET)
    protected Object set_config(HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN);

        Map<String, String[]> requestedConfigs = request.getParameterMap();
        LOG.debug("get config from url: {}", requestedConfigs);

        Map<String, String> appliedConfigs = Maps.newHashMap();
        Map<String, String> rejectedConfigs = Maps.newHashMap();
        for (Field field : ConfigBase.confClass.getFields()) {
            // only fields annotated @ConfField and marked mutable may be set
            ConfField anno = field.getAnnotation(ConfField.class);
            if (anno == null || !anno.mutable()) {
                continue;
            }
            // master-only configs are skipped on non-master FEs
            if (anno.masterOnly() && !Catalog.getCurrentCatalog().isMaster()) {
                continue;
            }
            // the config key is the annotation value, or the field name if unset
            String confKey = anno.value().equals("") ? field.getName() : anno.value();
            String[] confVals = requestedConfigs.get(confKey);
            // require exactly one value for the key
            if (confVals == null || confVals.length != 1) {
                continue;
            }
            try {
                ConfigBase.setConfigField(field, confVals[0]);
                appliedConfigs.put(confKey, confVals[0]);
            } catch (Exception e) {
                LOG.warn("failed to set config {}:{}, {}", confKey, confVals[0], e.getMessage());
            }
        }

        // every requested key that was not applied is echoed back as an error
        for (Map.Entry<String, String[]> entry : requestedConfigs.entrySet()) {
            if (!appliedConfigs.containsKey(entry.getKey())) {
                String[] vals = entry.getValue();
                rejectedConfigs.put(entry.getKey(), vals.length == 1 ? vals[0] : "invalid value");
            }
        }

        Map<String, Map<String, String>> resultMap = Maps.newHashMap();
        resultMap.put("set", appliedConfigs);
        resultMap.put("err", rejectedConfigs);
        return ResponseEntityBuilder.ok(resultMap);
    }
}

View File

@ -0,0 +1,284 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Database;
import org.apache.doris.catalog.OlapTable;
import org.apache.doris.catalog.Table;
import org.apache.doris.catalog.Table.TableType;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.Config;
import org.apache.doris.common.proc.ProcNodeInterface;
import org.apache.doris.common.proc.ProcResult;
import org.apache.doris.common.proc.ProcService;
import org.apache.doris.ha.HAProtocol;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.persist.Storage;
import org.apache.doris.qe.ConnectContext;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.view.RedirectView;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@RestController
public class ShowAction extends RestBaseController {
    private static final Logger LOG = LogManager.getLogger(ShowAction.class);

    // Sub-commands of /api/show_meta_info, selected by the "action" query parameter.
    private enum Action {
        SHOW_DB_SIZE,
        SHOW_HA,
        INVALID;

        public static Action getAction(String str) {
            try {
                return valueOf(str);
            } catch (Exception ex) {
                // unknown or malformed action name
                return INVALID;
            }
        }
    }

    /**
     * Show meta info of this FE.
     * The concrete info is selected by the "action" query parameter:
     * SHOW_DB_SIZE returns per-database data size, SHOW_HA returns HA status.
     */
    @RequestMapping(path = "/api/show_meta_info", method = RequestMethod.GET)
    public Object show_meta_info(HttpServletRequest request, HttpServletResponse response) {
        String action = request.getParameter("action");
        if (Strings.isNullOrEmpty(action)) {
            return ResponseEntityBuilder.badRequest("Missing action parameter");
        }
        switch (Action.getAction(action.toUpperCase())) {
            case SHOW_DB_SIZE:
                return ResponseEntityBuilder.ok(getDataSize());
            case SHOW_HA:
                try {
                    return ResponseEntityBuilder.ok(getHaInfo());
                } catch (IOException e) {
                    return ResponseEntityBuilder.internalError(e.getMessage());
                }
            default:
                return ResponseEntityBuilder.badRequest("Unknown action: " + action);
        }
    }

    /**
     * Browse the proc tree of this FE.
     * Format:
     * http://username:password@192.168.1.1:8030/api/show_proc?path=/
     * With forward=true, a non-master FE redirects the request to the master.
     */
    @RequestMapping(path = "/api/show_proc", method = RequestMethod.GET)
    public Object show_proc(HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        // check authority
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN);

        String path = request.getParameter("path");
        String forward = request.getParameter("forward");
        boolean isForward = false;
        if (!Strings.isNullOrEmpty(forward) && forward.equals("true")) {
            isForward = true;
        }

        // forward to master if necessary
        if (!Catalog.getCurrentCatalog().isMaster() && isForward) {
            RedirectView redirectView = redirectToMaster(request, response);
            Preconditions.checkNotNull(redirectView);
            return redirectView;
        } else {
            ProcNodeInterface procNode = null;
            ProcService instance = ProcService.getInstance();
            try {
                // an empty path means the proc root
                if (Strings.isNullOrEmpty(path)) {
                    procNode = instance.open("/");
                } else {
                    procNode = instance.open(path);
                }
            } catch (AnalysisException e) {
                return ResponseEntityBuilder.okWithCommonError(e.getMessage());
            }

            if (procNode != null) {
                ProcResult result;
                try {
                    result = procNode.fetchResult();
                    List<List<String>> rows = result.getRows();
                    return ResponseEntityBuilder.ok(rows);
                } catch (AnalysisException e) {
                    return ResponseEntityBuilder.okWithCommonError(e.getMessage());
                }
            } else {
                return ResponseEntityBuilder.badRequest("Invalid proc path: " + path);
            }
        }
    }

    /**
     * Show runtime info of this FE JVM: memory usage and live thread count.
     */
    @RequestMapping(path = "/api/show_runtime_info", method = RequestMethod.GET)
    public Object show_runtime_info(HttpServletRequest request, HttpServletResponse response) {
        HashMap<String, String> feInfo = new HashMap<String, String>();

        // Get memory info
        Runtime r = Runtime.getRuntime();
        feInfo.put("free_mem", String.valueOf(r.freeMemory()));
        feInfo.put("total_mem", String.valueOf(r.totalMemory()));
        feInfo.put("max_mem", String.valueOf(r.maxMemory()));

        // Get thread count: walk up to the root thread group so that
        // activeCount() covers all live threads of the JVM.
        ThreadGroup parentThread = Thread.currentThread().getThreadGroup();
        while (parentThread.getParent() != null) {
            parentThread = parentThread.getParent();
        }
        feInfo.put("thread_cnt", String.valueOf(parentThread.activeCount()));

        return ResponseEntityBuilder.ok(feInfo);
    }

    /**
     * Show the total data size of one database (via the "db" parameter),
     * or of all databases (except the info schema db) when no db is given.
     */
    @RequestMapping(path = "/api/show_data", method = RequestMethod.GET)
    public Object show_data(HttpServletRequest request, HttpServletResponse response) {
        Map<String, Long> oneEntry = Maps.newHashMap();

        String dbName = request.getParameter(DB_KEY);
        ConcurrentHashMap<String, Database> fullNameToDb = Catalog.getCurrentCatalog().getFullNameToDb();
        long totalSize = 0;
        if (dbName != null) {
            String fullDbName = getFullDbName(dbName);
            Database db = fullNameToDb.get(fullDbName);
            if (db == null) {
                return ResponseEntityBuilder.okWithCommonError("database " + fullDbName + " not found.");
            }
            totalSize = getDataSizeOfDatabase(db);
            oneEntry.put(fullDbName, totalSize);
        } else {
            for (Database db : fullNameToDb.values()) {
                if (db.isInfoSchemaDb()) {
                    continue;
                }
                totalSize += getDataSizeOfDatabase(db);
            }
            oneEntry.put("__total_size", totalSize);
        }
        return ResponseEntityBuilder.ok(oneEntry);
    }

    /**
     * Collect HA related info: FE role, journal ids, master address,
     * electable/observer nodes and checkpoint info.
     *
     * @throws IOException if reading the image storage fails
     */
    private Map<String, String> getHaInfo() throws IOException {
        HashMap<String, String> feInfo = new HashMap<String, String>();
        feInfo.put("role", Catalog.getCurrentCatalog().getFeType().toString());
        if (Catalog.getCurrentCatalog().isMaster()) {
            feInfo.put("current_journal_id",
                    String.valueOf(Catalog.getCurrentCatalog().getEditLog().getMaxJournalId()));
        } else {
            feInfo.put("current_journal_id",
                    String.valueOf(Catalog.getCurrentCatalog().getReplayedJournalId()));
        }

        HAProtocol haProtocol = Catalog.getCurrentCatalog().getHaProtocol();
        if (haProtocol != null) {
            InetSocketAddress master = null;
            try {
                master = haProtocol.getLeader();
            } catch (Exception e) {
                // this may happen when majority of FOLLOWERS are down and no MASTER right now.
                LOG.warn("failed to get leader: {}", e.getMessage());
            }
            if (master != null) {
                feInfo.put("master", master.getHostString());
            } else {
                feInfo.put("master", "unknown");
            }

            List<InetSocketAddress> electableNodes = haProtocol.getElectableNodes(false);
            ArrayList<String> electableNodeNames = new ArrayList<String>();
            if (electableNodes != null) {
                for (InetSocketAddress node : electableNodes) {
                    electableNodeNames.add(node.getHostString());
                }
                feInfo.put("electable_nodes", StringUtils.join(electableNodeNames.toArray(), ","));
            }

            List<InetSocketAddress> observerNodes = haProtocol.getObserverNodes();
            ArrayList<String> observerNodeNames = new ArrayList<String>();
            if (observerNodes != null) {
                for (InetSocketAddress node : observerNodes) {
                    observerNodeNames.add(node.getHostString());
                }
                feInfo.put("observer_nodes", StringUtils.join(observerNodeNames.toArray(), ","));
            }
        }

        feInfo.put("can_read", String.valueOf(Catalog.getCurrentCatalog().canRead()));
        feInfo.put("is_ready", String.valueOf(Catalog.getCurrentCatalog().isReady()));
        Storage storage = new Storage(Config.meta_dir + "/image");
        feInfo.put("last_checkpoint_version", String.valueOf(storage.getImageSeq()));
        long lastCheckpointTime = storage.getCurrentImageFile().lastModified();
        feInfo.put("last_checkpoint_time", String.valueOf(lastCheckpointTime));
        return feInfo;
    }

    /**
     * Sum up the data size of all OLAP tables in the given database,
     * under the database read lock.
     */
    public long getDataSizeOfDatabase(Database db) {
        long totalSize = 0;
        db.readLock();
        try {
            // sort by table name
            List<Table> tables = db.getTables();
            for (Table table : tables) {
                // non-OLAP tables (e.g. external tables) have no local data size
                if (table.getType() != TableType.OLAP) {
                    continue;
                }
                long tableSize = ((OlapTable) table).getDataSize();
                totalSize += tableSize;
            } // end for tables
        } finally {
            db.readUnlock();
        }
        return totalSize;
    }

    /**
     * Collect the data size of every database.
     */
    private Map<String, Long> getDataSize() {
        Map<String, Long> result = new HashMap<String, Long>();
        List<String> dbNames = Catalog.getCurrentCatalog().getDbNames();
        for (String dbName : dbNames) {
            Database db = Catalog.getCurrentCatalog().getDb(dbName);
            if (db == null) {
                // the db may have been dropped between getDbNames() and getDb();
                // without this check a concurrent drop leads to an NPE below.
                continue;
            }
            result.put(dbName, Long.valueOf(getDataSizeOfDatabase(db)));
        } // end for dbs
        return result;
    }
}

View File

@ -0,0 +1,126 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.common.DdlException;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.httpv2.util.ExecutionResultSet;
import org.apache.doris.httpv2.util.StatementSubmitter;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.system.SystemInfoService;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.google.common.base.Strings;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.lang.reflect.Type;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
/**
 * For executing a statement via HTTP.
 * The statement is submitted to a {@link StatementSubmitter} and, in sync mode,
 * the caller blocks until the result set is available.
 */
@RestController
public class StmtExecutionAction extends RestBaseController {
    private static final Logger LOG = LogManager.getLogger(StmtExecutionAction.class);

    private static StatementSubmitter stmtSubmitter = new StatementSubmitter();

    private static final String PARAM_SYNC = "sync";
    private static final String PARAM_LIMIT = "limit";
    private static final long DEFAULT_ROW_LIMIT = 1000;
    private static final long MAX_ROW_LIMIT = 10000;

    /**
     * Execute a SQL.
     * Request body:
     * {
     *     "stmt" : "select * from tbl1"
     * }
     *
     * @param ns       cluster namespace, only 'default_cluster' is supported
     * @param dbName   database the statement runs against
     * @param stmtBody JSON request body containing the "stmt" field
     */
    @RequestMapping(path = "/api/query/{" + NS_KEY + "}/{" + DB_KEY + "}", method = {RequestMethod.POST})
    public Object executeSQL(
            @PathVariable(value = NS_KEY) String ns,
            @PathVariable(value = DB_KEY) String dbName,
            HttpServletRequest request, HttpServletResponse response,
            @RequestBody String stmtBody) throws DdlException {
        ActionAuthorizationInfo authInfo = checkWithCookie(request, response, false);

        if (!ns.equalsIgnoreCase(SystemInfoService.DEFAULT_CLUSTER)) {
            return ResponseEntityBuilder.badRequest("Only support 'default_cluster' now");
        }

        // "sync=1" (the default) waits for the result; any other value means async.
        boolean isSync = true;
        String syncParam = request.getParameter(PARAM_SYNC);
        if (!Strings.isNullOrEmpty(syncParam)) {
            isSync = syncParam.equals("1");
        }

        // Row limit for the result set, capped at MAX_ROW_LIMIT.
        long limit = DEFAULT_ROW_LIMIT;
        String limitParam = request.getParameter(PARAM_LIMIT);
        if (!Strings.isNullOrEmpty(limitParam)) {
            try {
                limit = Math.min(Long.parseLong(limitParam), MAX_ROW_LIMIT);
            } catch (NumberFormatException e) {
                // previously an unparsable limit escaped as an uncaught exception (HTTP 500)
                return ResponseEntityBuilder.badRequest("Invalid limit parameter: " + limitParam);
            }
        }

        Type type = new TypeToken<StmtRequestBody>() {
        }.getType();
        StmtRequestBody stmtRequestBody = new Gson().fromJson(stmtBody, type);
        // Gson returns null for an empty/blank body, so guard against that as well.
        if (stmtRequestBody == null || Strings.isNullOrEmpty(stmtRequestBody.stmt)) {
            return ResponseEntityBuilder.badRequest("Missing statement request body");
        }
        LOG.info("stmt: {}", stmtRequestBody.stmt);

        ConnectContext.get().setDatabase(getFullDbName(dbName));

        // 2. Submit stmt
        StatementSubmitter.StmtContext stmtCtx = new StatementSubmitter.StmtContext(
                stmtRequestBody.stmt, authInfo.fullUserName, authInfo.password, limit
        );
        Future<ExecutionResultSet> future = stmtSubmitter.submit(stmtCtx);

        if (isSync) {
            try {
                ExecutionResultSet resultSet = future.get();
                return ResponseEntityBuilder.ok(resultSet.getResult());
            } catch (InterruptedException e) {
                // restore the interrupt status so callers up the stack can observe it
                Thread.currentThread().interrupt();
                LOG.warn("failed to execute stmt", e);
                return ResponseEntityBuilder.okWithCommonError("Failed to execute sql: " + e.getMessage());
            } catch (ExecutionException e) {
                LOG.warn("failed to execute stmt", e);
                return ResponseEntityBuilder.okWithCommonError("Failed to execute sql: " + e.getMessage());
            }
        } else {
            return ResponseEntityBuilder.okWithCommonError("Not support async query execution");
        }
    }

    // JSON request body shape: {"stmt": "..."}
    private static class StmtRequestBody {
        public String stmt;
    }
}

View File

@ -0,0 +1,86 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Database;
import org.apache.doris.catalog.MaterializedIndexMeta;
import org.apache.doris.catalog.OlapTable;
import org.apache.doris.catalog.Table;
import org.apache.doris.catalog.Table.TableType;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.thrift.TStorageType;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.List;
import java.util.Map;
@RestController
public class StorageTypeCheckAction extends RestBaseController {

    /**
     * Scan every OLAP table of the given database and report, per table,
     * the materialized indexes whose storage type is still ROW.
     * Requires ADMIN privilege.
     */
    @RequestMapping(path = "/api/_check_storagetype", method = RequestMethod.GET)
    protected Object check_storagetype(HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        checkGlobalAuth(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN);

        String dbName = request.getParameter(DB_KEY);
        if (Strings.isNullOrEmpty(dbName)) {
            return ResponseEntityBuilder.badRequest("No database selected");
        }

        String fullDbName = getFullDbName(dbName);
        Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
        if (db == null) {
            return ResponseEntityBuilder.badRequest("Database " + dbName + " does not exist");
        }

        // table name -> (index name -> storage type), only ROW indexes are listed
        Map<String, Map<String, String>> result = Maps.newHashMap();
        db.readLock();
        try {
            for (Table table : db.getTables()) {
                if (table.getType() != TableType.OLAP) {
                    continue;
                }
                OlapTable olapTable = (OlapTable) table;
                Map<String, String> rowIndexes = Maps.newHashMap();
                olapTable.getIndexIdToMeta().forEach((indexId, indexMeta) -> {
                    if (indexMeta.getStorageType() == TStorageType.ROW) {
                        rowIndexes.put(olapTable.getIndexNameById(indexId), indexMeta.getStorageType().name());
                    }
                });
                result.put(table.getName(), rowIndexes);
            }
        } finally {
            db.readUnlock();
        }
        return ResponseEntityBuilder.ok(result);
    }
}

View File

@ -0,0 +1,292 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import io.netty.handler.codec.http.HttpResponseStatus;
import org.apache.doris.analysis.InlineViewRef;
import org.apache.doris.analysis.SelectStmt;
import org.apache.doris.analysis.StatementBase;
import org.apache.doris.analysis.TableName;
import org.apache.doris.analysis.TableRef;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Database;
import org.apache.doris.catalog.Table;
import org.apache.doris.common.DorisHttpException;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.httpv2.util.HttpUtil;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.planner.PlanFragment;
import org.apache.doris.planner.Planner;
import org.apache.doris.planner.ScanNode;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.qe.OriginStatement;
import org.apache.doris.qe.StmtExecutor;
import org.apache.doris.thrift.TDataSink;
import org.apache.doris.thrift.TDataSinkType;
import org.apache.doris.thrift.TMemoryScratchSink;
import org.apache.doris.thrift.TNetworkAddress;
import org.apache.doris.thrift.TPaloScanRange;
import org.apache.doris.thrift.TPlanFragment;
import org.apache.doris.thrift.TQueryOptions;
import org.apache.doris.thrift.TQueryPlanInfo;
import org.apache.doris.thrift.TScanRangeLocations;
import org.apache.doris.thrift.TTabletVersionInfo;
import org.apache.doris.thrift.TUniqueId;
import com.google.common.base.Strings;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.ArrayList;
import java.util.Base64;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * This class responsible for parse the sql and generate the query plan fragment for a (only one) table{@see OlapTable}
 * the related tablet maybe pruned by query planer according the `where` predicate.
 *
 * The response contains a Base64-encoded serialized TQueryPlanInfo plus the pruned
 * tablet routing, intended for external systems (e.g. Spark/Flink connectors) that
 * read tablets directly from BE.
 */
@RestController
public class TableQueryPlanAction extends RestBaseController {
    public static final Logger LOG = LogManager.getLogger(TableQueryPlanAction.class);

    /**
     * Handle GET/POST /api/{db}/{table}/_query_plan.
     * The request body must be a JSON object with a "sql" field containing a
     * single-table SELECT; returns the plan and tablet routing in resultMap.
     */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/{" + TABLE_KEY + "}/_query_plan", method = {RequestMethod.GET, RequestMethod.POST})
    public Object query_plan(
            @PathVariable(value = DB_KEY) final String dbName,
            @PathVariable(value = TABLE_KEY) final String tblName,
            HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        // just allocate 2 slot for top holder map
        Map<String, Object> resultMap = new HashMap<>(4);
        String postContent = HttpUtil.getBody(request);
        try {
            // may be these common validate logic should be moved to one base class
            String sql;
            if (Strings.isNullOrEmpty(postContent)) {
                return ResponseEntityBuilder.badRequest("POST body must contains [sql] root object");
            }
            JSONObject jsonObject;
            try {
                jsonObject = new JSONObject(postContent);
            } catch (JSONException e) {
                return ResponseEntityBuilder.badRequest("malformed json: " + e.getMessage());
            }
            sql = String.valueOf(jsonObject.opt("sql"));
            if (Strings.isNullOrEmpty(sql)) {
                return ResponseEntityBuilder.badRequest("POST body must contains [sql] root object");
            }
            LOG.info("receive SQL statement [{}] from external service [ user [{}]] for database [{}] table [{}]",
                    sql, ConnectContext.get().getCurrentUserIdentity(), dbName, tblName);

            String fullDbName = getFullDbName(dbName);
            // check privilege for select, otherwise return HTTP 401
            checkTblAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, tblName, PrivPredicate.SELECT);
            Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
            if (db == null) {
                return ResponseEntityBuilder.okWithCommonError("Database [" + dbName + "] " + "does not exists");
            }
            // may be should acquire writeLock
            db.readLock();
            try {
                Table table = db.getTable(tblName);
                if (table == null) {
                    return ResponseEntityBuilder.okWithCommonError("Table [" + tblName + "] " + "does not exists");
                }
                // just only support OlapTable, ignore others such as ESTable
                if (table.getType() != Table.TableType.OLAP) {
                    return ResponseEntityBuilder.okWithCommonError("only support OlapTable currently, "
                            + "but Table [" + tblName + "] " + "is not a OlapTable");
                }
                // parse/analysis/plan the sql and acquire tablet distributions
                handleQuery(ConnectContext.get(), fullDbName, tblName, sql, resultMap);
            } finally {
                db.readUnlock();
            }
        } catch (DorisHttpException e) {
            // status code should conforms to HTTP semantic
            resultMap.put("status", e.getCode().code());
            resultMap.put("exception", e.getMessage());
        }
        return ResponseEntityBuilder.ok(resultMap);
    }

    /**
     * process the sql syntax and return the resolved pruned tablet
     *
     * @param context context for analyzer
     * @param requestDb   database name taken from the URL, must match the sql
     * @param requestTable table name taken from the URL, must match the sql
     * @param sql the single table select statement
     * @param result the acquired results
     * @throws DorisHttpException with BAD_REQUEST for sql/validation errors and
     *         INTERNAL_SERVER_ERROR for unexpected planner/serialization failures
     */
    private void handleQuery(ConnectContext context, String requestDb, String requestTable, String sql,
                             Map<String, Object> result) throws DorisHttpException {
        // use SE to resolve sql
        StmtExecutor stmtExecutor = new StmtExecutor(context, new OriginStatement(sql, 0), false);
        try {
            TQueryOptions tQueryOptions = context.getSessionVariable().toThrift();
            // Conduct Planner create SingleNodePlan#createPlanFragments
            tQueryOptions.num_nodes = 1;
            // analyze sql
            stmtExecutor.analyze(tQueryOptions);
        } catch (Exception e) {
            throw new DorisHttpException(HttpResponseStatus.BAD_REQUEST, e.getMessage());
        }
        // the parsed logical statement
        StatementBase query = stmtExecutor.getParsedStmt();
        // only process select semantic
        if (!(query instanceof SelectStmt)) {
            throw new DorisHttpException(HttpResponseStatus.BAD_REQUEST, "Select statement needed, but found [" + sql + " ]");
        }
        SelectStmt stmt = (SelectStmt) query;
        // just only process sql like `select * from table where <predicate>`, only support executing scan semantic
        if (stmt.hasAggInfo() || stmt.hasAnalyticInfo()
                || stmt.hasOrderByClause() || stmt.hasOffset() || stmt.hasLimit() || stmt.isExplain()) {
            throw new DorisHttpException(HttpResponseStatus.BAD_REQUEST, "only support single table filter-prune-scan, but found [ " + sql + "]");
        }
        // process only one table by one http query
        List<TableRef> fromTables = stmt.getTableRefs();
        if (fromTables.size() != 1) {
            throw new DorisHttpException(HttpResponseStatus.BAD_REQUEST, "Select statement must hava only one table");
        }
        TableRef fromTable = fromTables.get(0);
        if (fromTable instanceof InlineViewRef) {
            throw new DorisHttpException(HttpResponseStatus.BAD_REQUEST, "Select statement must not embed another statement");
        }
        // check consistent http requested resource with sql referenced
        // if consistent in this way, can avoid check privilege
        TableName tableAndDb = fromTables.get(0).getName();
        if (!(tableAndDb.getDb().equals(requestDb) && tableAndDb.getTbl().equals(requestTable))) {
            throw new DorisHttpException(HttpResponseStatus.BAD_REQUEST, "requested database and table must consistent with sql: request [ "
                    + requestDb + "." + requestTable + "]" + "and sql [" + tableAndDb.toString() + "]");
        }

        // acquired Planner to get PlanNode and fragment templates
        Planner planner = stmtExecutor.planner();
        // acquire ScanNode to obtain pruned tablet
        // in this way, just retrieve only one scannode
        List<ScanNode> scanNodes = planner.getScanNodes();
        if (scanNodes.size() != 1) {
            throw new DorisHttpException(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Planner should plan just only one ScanNode but found [ " + scanNodes.size() + "]");
        }
        List<TScanRangeLocations> scanRangeLocations = scanNodes.get(0).getScanRangeLocations(0);
        // acquire the PlanFragment which the executable template
        List<PlanFragment> fragments = planner.getFragments();
        if (fragments.size() != 1) {
            throw new DorisHttpException(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Planner should plan just only one PlanFragment but found [ " + fragments.size() + "]");
        }

        TQueryPlanInfo tQueryPlanInfo = new TQueryPlanInfo();

        // acquire TPlanFragment
        TPlanFragment tPlanFragment = fragments.get(0).toThrift();
        // set up TMemoryScratchSink so BE writes results into a memory sink
        // that the external caller can drain, instead of a normal result sink
        TDataSink tDataSink = new TDataSink();
        tDataSink.type = TDataSinkType.MEMORY_SCRATCH_SINK;
        tDataSink.memory_scratch_sink = new TMemoryScratchSink();
        tPlanFragment.output_sink = tDataSink;

        tQueryPlanInfo.plan_fragment = tPlanFragment;
        tQueryPlanInfo.desc_tbl = query.getAnalyzer().getDescTbl().toThrift();
        // set query_id
        UUID uuid = UUID.randomUUID();
        tQueryPlanInfo.query_id = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());

        Map<Long, TTabletVersionInfo> tablet_info = new HashMap<>();
        // acquire resolved tablet distribution
        Map<String, Node> tabletRoutings = assemblePrunedPartitions(scanRangeLocations);
        tabletRoutings.forEach((tabletId, node) -> {
            long tablet = Long.parseLong(tabletId);
            tablet_info.put(tablet, new TTabletVersionInfo(tablet, node.version, node.versionHash, node.schemaHash));
        });
        tQueryPlanInfo.tablet_info = tablet_info;

        // serialize TQueryPlanInfo and encode plan with Base64 to string in order to translate by json format
        TSerializer serializer = new TSerializer();
        String opaqued_query_plan;
        try {
            byte[] query_plan_stream = serializer.serialize(tQueryPlanInfo);
            opaqued_query_plan = Base64.getEncoder().encodeToString(query_plan_stream);
        } catch (TException e) {
            throw new DorisHttpException(HttpResponseStatus.INTERNAL_SERVER_ERROR, "TSerializer failed to serialize PlanFragment, reason [ " + e.getMessage() + " ]");
        }
        result.put("partitions", tabletRoutings);
        result.put("opaqued_query_plan", opaqued_query_plan);
        result.put("status", 200);
    }

    /**
     * acquire all involved (already pruned) tablet routing
     *
     * @param scanRangeLocationsList scan ranges produced by the single ScanNode
     * @return map of tablet id (as string, for json keys) to its routing info
     */
    private Map<String, Node> assemblePrunedPartitions(List<TScanRangeLocations> scanRangeLocationsList) {
        Map<String, Node> result = new HashMap<>();
        for (TScanRangeLocations scanRangeLocations : scanRangeLocationsList) {
            // only process palo(doris) scan range
            TPaloScanRange scanRange = scanRangeLocations.scan_range.palo_scan_range;
            Node tabletRouting = new Node(Long.parseLong(scanRange.version),
                    Long.parseLong(scanRange.version_hash), Integer.parseInt(scanRange.schema_hash));
            for (TNetworkAddress address : scanRange.hosts) {
                // "host:port" entries the caller can use to reach a BE replica
                tabletRouting.addRouting(address.hostname + ":" + address.port);
            }
            result.put(String.valueOf(scanRange.tablet_id), tabletRouting);
        }
        return result;
    }

    // helper class for json transformation: one tablet's replicas and version info
    final class Node {
        // ["host1:port1", "host2:port2", "host3:port3"]
        public List<String> routings = new ArrayList<>();
        public long version;
        public long versionHash;
        public int schemaHash;

        public Node(long version, long versionHash, int schemaHash) {
            this.version = version;
            this.versionHash = versionHash;
            this.schemaHash = schemaHash;
        }

        private void addRouting(String routing) {
            routings.add(routing);
        }
    }
}

View File

@ -0,0 +1,92 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Database;
import org.apache.doris.catalog.OlapTable;
import org.apache.doris.catalog.Table;
import org.apache.doris.common.DorisHttpException;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * This class is responsible for fetch the approximate row count of the specified table from cluster-meta data,
 * the approximate row maybe used for some computing system to decide use which compute-algorithm can be used
 * such as shuffle join or broadcast join.
 * <p>
 * This API is not intended to compute the exact row count of the specified table, if you need the exact row count,
 * please consider using the sql syntax `select count(*) from {table}`
 */
@RestController
public class TableRowCountAction extends RestBaseController {

    /**
     * Handle GET /api/{db}/{table}/_count.
     * Requires SELECT privilege on the table; responds with
     * {"status": 200, "size": <approximate row count>} on success, or a
     * DorisHttpException's status/exception on failure.
     */
    @RequestMapping(path = "/api/{" + DB_KEY + "}/{" + TABLE_KEY + "}/_count", method = RequestMethod.GET)
    public Object count(
            @PathVariable(value = DB_KEY) final String dbName,
            @PathVariable(value = TABLE_KEY) final String tblName,
            HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        // just allocate 2 slot for top holder map
        Map<String, Object> resultMap = new HashMap<>(4);
        try {
            String fullDbName = getFullDbName(dbName);
            // check privilege for select, otherwise return HTTP 401
            checkTblAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, tblName, PrivPredicate.SELECT);
            Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
            if (db == null) {
                return ResponseEntityBuilder.okWithCommonError("Database [" + dbName + "] " + "does not exists");
            }
            // NOTE(review): this block only reads table meta, yet it takes the
            // exclusive write lock (sibling read-only actions use readLock()).
            // If proximateRowCount() does not mutate state, readLock() would
            // suffice and avoid blocking concurrent readers — TODO confirm.
            db.writeLock();
            try {
                Table table = db.getTable(tblName);
                if (table == null) {
                    return ResponseEntityBuilder.okWithCommonError("Table [" + tblName + "] " + "does not exists");
                }
                // just only support OlapTable, ignore others such as ESTable
                if (!(table instanceof OlapTable)) {
                    return ResponseEntityBuilder.okWithCommonError("Table [" + tblName + "] "
                            + "is not a OlapTable, only support OlapTable currently");
                }
                OlapTable olapTable = (OlapTable) table;
                resultMap.put("status", 200);
                resultMap.put("size", olapTable.proximateRowCount());
            } finally {
                db.writeUnlock();
            }
        } catch (DorisHttpException e) {
            // status code should conforms to HTTP semantic
            resultMap.put("status", e.getCode().code());
            resultMap.put("exception", e.getMessage());
        }
        return ResponseEntityBuilder.ok(resultMap);
    }
}

View File

@ -0,0 +1,114 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.Database;
import org.apache.doris.catalog.OlapTable;
import org.apache.doris.catalog.PrimitiveType;
import org.apache.doris.catalog.ScalarType;
import org.apache.doris.catalog.Table;
import org.apache.doris.catalog.Type;
import org.apache.doris.common.DorisHttpException;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Get table schema for specified cluster.database.table with privilege checking
*/
/**
 * Get table schema for specified cluster.database.table with privilege checking.
 * Returns a list of {name, type, comment} entries (plus precision/scale for
 * decimal columns). Only OlapTable is supported.
 */
@RestController
public class TableSchemaAction extends RestBaseController {

    @RequestMapping(path = "/api/{" + DB_KEY + "}/{" + TABLE_KEY + "}/_schema", method = RequestMethod.GET)
    protected Object schema(
            @PathVariable(value = DB_KEY) final String dbName,
            @PathVariable(value = TABLE_KEY) final String tblName,
            HttpServletRequest request, HttpServletResponse response) {
        executeCheckPassword(request, response);
        // holds either {status, properties} on success or {status, exception} on error
        Map<String, Object> resultMap = new HashMap<>(2);
        try {
            String fullDbName = getFullDbName(dbName);
            // check privilege for select, otherwise return 401 HTTP status
            checkTblAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, tblName, PrivPredicate.SELECT);
            Database db = Catalog.getCurrentCatalog().getDb(fullDbName);
            if (db == null) {
                return ResponseEntityBuilder.okWithCommonError("Database [" + dbName + "] " + "does not exists");
            }
            db.readLock();
            try {
                Table table = db.getTable(tblName);
                if (table == null) {
                    return ResponseEntityBuilder.okWithCommonError("Table [" + tblName + "] " + "does not exists");
                }
                // just only support OlapTable, ignore others such as ESTable
                if (!(table instanceof OlapTable)) {
                    return ResponseEntityBuilder.okWithCommonError("Table [" + tblName + "] "
                            + "is not a OlapTable, only support OlapTable currently");
                }
                try {
                    List<Column> columns = table.getBaseSchema();
                    // was a raw ArrayList; use the parameterized diamond form
                    List<Map<String, String>> propList = new ArrayList<>(columns.size());
                    for (Column column : columns) {
                        // may hold up to 5 keys (name, type, comment, precision, scale),
                        // so don't pre-size it to 2
                        Map<String, String> baseInfo = new HashMap<>();
                        Type colType = column.getOriginType();
                        PrimitiveType primitiveType = colType.getPrimitiveType();
                        if (primitiveType == PrimitiveType.DECIMALV2 || primitiveType == PrimitiveType.DECIMAL) {
                            // decimal columns additionally expose precision and scale
                            ScalarType scalarType = (ScalarType) colType;
                            baseInfo.put("precision", scalarType.getPrecision() + "");
                            baseInfo.put("scale", scalarType.getScalarScale() + "");
                        }
                        baseInfo.put("type", primitiveType.toString());
                        baseInfo.put("comment", column.getComment());
                        baseInfo.put("name", column.getDisplayName());
                        propList.add(baseInfo);
                    }
                    resultMap.put("status", 200);
                    resultMap.put("properties", propList);
                } catch (Exception e) {
                    // Transform the general Exception to custom DorisHttpException
                    return ResponseEntityBuilder.okWithCommonError(e.getMessage());
                }
            } finally {
                db.readUnlock();
            }
        } catch (DorisHttpException e) {
            // status code should conforms to HTTP semantic
            resultMap.put("status", e.getCode().code());
            resultMap.put("exception", e.getMessage());
        }
        return ResponseEntityBuilder.ok(resultMap);
    }
}

View File

@ -0,0 +1,306 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.rest;
import org.apache.doris.common.Config;
import org.apache.doris.httpv2.entity.ResponseEntityBuilder;
import org.apache.doris.httpv2.util.LoadSubmitter;
import org.apache.doris.httpv2.util.TmpFileMgr;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.system.SystemInfoService;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
/**
* Upload file
*/
/**
 * RESTful APIs for uploading a local file to FE and then loading it into a table.
 * POST   uploads a file, GET lists/previews uploaded files, PUT submits a load
 * of an uploaded file (delegated to a local stream load), DELETE removes a file.
 */
@RestController
public class UploadAction extends RestBaseController {

    private static final Logger LOG = LogManager.getLogger(UploadAction.class);

    // shared across all requests; TmpFileMgr/LoadSubmitter manage their own state
    private static final TmpFileMgr fileMgr = new TmpFileMgr(Config.tmp_dir);
    private static final LoadSubmitter loadSubmitter = new LoadSubmitter();

    private static final String PARAM_COLUMN_SEPARATOR = "column_separator";
    private static final String PARAM_PREVIEW = "preview";
    private static final String PARAM_FILE_ID = "file_id";
    private static final String PARAM_FILE_UUID = "file_uuid";

    /**
     * Upload the file
     * @param ns must be 'default_cluster'
     * @param dbName
     * @param tblName
     * @param file the multipart file content
     * @param request
     * @param response
     * @return the saved TmpFile info (with preview content if preview=true)
     */
    @RequestMapping(path = "/api/{" + NS_KEY + "}/{" + DB_KEY + "}/{" + TABLE_KEY + "}/upload", method = {RequestMethod.POST})
    public Object upload(
            @PathVariable(value = NS_KEY) String ns,
            @PathVariable(value = DB_KEY) String dbName,
            @PathVariable(value = TABLE_KEY) String tblName,
            @RequestParam("file") MultipartFile file,
            HttpServletRequest request, HttpServletResponse response) {
        checkWithCookie(request, response, false);

        if (!ns.equalsIgnoreCase(SystemInfoService.DEFAULT_CLUSTER)) {
            return ResponseEntityBuilder.badRequest("Only support 'default_cluster' now");
        }

        String fullDbName = getFullDbName(dbName);
        // uploading is a precursor of loading, so LOAD privilege is required
        checkTblAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, tblName, PrivPredicate.LOAD);

        String columnSeparator = request.getParameter(PARAM_COLUMN_SEPARATOR);
        if (Strings.isNullOrEmpty(columnSeparator)) {
            columnSeparator = "\t";
        }

        String preview = request.getParameter(PARAM_PREVIEW);
        if (Strings.isNullOrEmpty(preview)) {
            preview = "false"; // default is false
        }

        if (file.isEmpty()) {
            return ResponseEntityBuilder.badRequest("Empty file");
        }

        try {
            TmpFileMgr.TmpFile tmpFile = fileMgr.upload(new TmpFileMgr.UploadFile(file, columnSeparator));
            // work on a copy so the preview content is not kept in the managed file
            TmpFileMgr.TmpFile copiedFile = tmpFile.copy();
            if (preview.equalsIgnoreCase("true")) {
                copiedFile.setPreview();
            }
            return ResponseEntityBuilder.ok(copiedFile);
        } catch (TmpFileMgr.TmpFileException | IOException e) {
            return ResponseEntityBuilder.okWithCommonError(e.getMessage());
        }
    }

    /**
     * Load the uploaded file
     * @param ns must be 'default_cluster'
     * @param dbName
     * @param tblName
     * @param request file_id and file_uuid params identify the uploaded file;
     *                stream load options are passed via request headers
     * @param response
     * @return the stream load submit result
     */
    @RequestMapping(path = "/api/{" + NS_KEY + "}/{" + DB_KEY + "}/{" + TABLE_KEY + "}/upload", method = {RequestMethod.PUT})
    public Object submit(
            @PathVariable(value = NS_KEY) String ns,
            @PathVariable(value = DB_KEY) String dbName,
            @PathVariable(value = TABLE_KEY) String tblName,
            HttpServletRequest request, HttpServletResponse response) {
        ActionAuthorizationInfo authInfo = checkWithCookie(request, response, false);

        if (!ns.equalsIgnoreCase(SystemInfoService.DEFAULT_CLUSTER)) {
            return ResponseEntityBuilder.badRequest("Only support 'default_cluster' now");
        }

        String fullDbName = getFullDbName(dbName);
        checkTblAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, tblName, PrivPredicate.LOAD);

        String fileIdStr = request.getParameter(PARAM_FILE_ID);
        if (Strings.isNullOrEmpty(fileIdStr)) {
            return ResponseEntityBuilder.badRequest("Missing file id parameter");
        }
        String fileUUIDStr = request.getParameter(PARAM_FILE_UUID);
        if (Strings.isNullOrEmpty(fileUUIDStr)) {
            // was wrongly reported as "Missing file id parameter"
            return ResponseEntityBuilder.badRequest("Missing file uuid parameter");
        }

        TmpFileMgr.TmpFile tmpFile = null;
        try {
            tmpFile = fileMgr.getFile(Long.valueOf(fileIdStr), fileUUIDStr);
        } catch (TmpFileMgr.TmpFileException e) {
            return ResponseEntityBuilder.okWithCommonError("file not found");
        }

        Preconditions.checkNotNull(tmpFile, fileIdStr);
        LoadContext loadContext = new LoadContext(request, dbName, tblName, authInfo.fullUserName, authInfo.password, tmpFile);
        Future<LoadSubmitter.SubmitResult> future = loadSubmitter.submit(loadContext);
        try {
            // wait synchronously for the stream load to finish
            LoadSubmitter.SubmitResult res = future.get();
            return ResponseEntityBuilder.ok(res);
        } catch (InterruptedException | ExecutionException e) {
            return ResponseEntityBuilder.okWithCommonError(e.getMessage());
        }
    }

    /**
     * Get all uploaded file or specified file
     * If preview is true, also return the preview of the file
     * @param ns must be 'default_cluster'
     * @param dbName
     * @param tblName
     * @param request optional file_id and file_uuid to select one file
     * @param response
     * @return the list of files, or the single requested file
     */
    @RequestMapping(path = "/api/{" + NS_KEY + "}/{" + DB_KEY + "}/{" + TABLE_KEY + "}/upload", method = {RequestMethod.GET})
    public Object list(
            @PathVariable(value = NS_KEY) String ns,
            @PathVariable(value = DB_KEY) String dbName,
            @PathVariable(value = TABLE_KEY) String tblName,
            HttpServletRequest request, HttpServletResponse response) {
        checkWithCookie(request, response, false);

        if (!ns.equalsIgnoreCase(SystemInfoService.DEFAULT_CLUSTER)) {
            return ResponseEntityBuilder.badRequest("Only support 'default_cluster' now");
        }

        String fullDbName = getFullDbName(dbName);
        checkTblAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, tblName, PrivPredicate.LOAD);

        String fileIdStr = request.getParameter(PARAM_FILE_ID);
        String fileUUIDStr = request.getParameter(PARAM_FILE_UUID);
        if (Strings.isNullOrEmpty(fileIdStr) || Strings.isNullOrEmpty(fileUUIDStr)) {
            // not specified file id, return all files list
            List<TmpFileMgr.TmpFileBrief> files = fileMgr.listFiles();
            return ResponseEntityBuilder.ok(files);
        }

        // return specified file
        String preview = request.getParameter(PARAM_PREVIEW);
        if (Strings.isNullOrEmpty(preview)) {
            preview = "true"; // default is true
        }
        try {
            TmpFileMgr.TmpFile tmpFile = fileMgr.getFile(Long.valueOf(fileIdStr), fileUUIDStr);
            TmpFileMgr.TmpFile copiedFile = tmpFile.copy();
            if (preview.equalsIgnoreCase("true")) {
                copiedFile.setPreview();
            }
            return ResponseEntityBuilder.ok(copiedFile);
        } catch (TmpFileMgr.TmpFileException | IOException e) {
            return ResponseEntityBuilder.okWithCommonError(e.getMessage());
        }
    }

    /**
     * Delete the specified uploaded file. Both file_id and file_uuid are required.
     */
    @RequestMapping(path = "/api/{" + NS_KEY + "}/{" + DB_KEY + "}/{" + TABLE_KEY + "}/upload", method = {RequestMethod.DELETE})
    public Object delete(
            @PathVariable(value = NS_KEY) String ns,
            @PathVariable(value = DB_KEY) String dbName,
            @PathVariable(value = TABLE_KEY) String tblName,
            HttpServletRequest request, HttpServletResponse response) {
        checkWithCookie(request, response, false);

        if (!ns.equalsIgnoreCase(SystemInfoService.DEFAULT_CLUSTER)) {
            return ResponseEntityBuilder.badRequest("Only support 'default_cluster' now");
        }

        String fullDbName = getFullDbName(dbName);
        checkTblAuth(ConnectContext.get().getCurrentUserIdentity(), fullDbName, tblName, PrivPredicate.LOAD);

        String fileIdStr = request.getParameter(PARAM_FILE_ID);
        if (Strings.isNullOrEmpty(fileIdStr)) {
            return ResponseEntityBuilder.badRequest("Missing file id parameter");
        }
        String fileUUIDStr = request.getParameter(PARAM_FILE_UUID);
        if (Strings.isNullOrEmpty(fileUUIDStr)) {
            // was wrongly reported as "Missing file id parameter"
            return ResponseEntityBuilder.badRequest("Missing file uuid parameter");
        }

        fileMgr.deleteFile(Long.valueOf(fileIdStr), fileUUIDStr);
        return ResponseEntityBuilder.ok();
    }

    /**
     * A context to save infos of stream load
     */
    public static class LoadContext {
        public String user;
        public String passwd;
        public String db;
        public String tbl;
        public TmpFileMgr.TmpFile file;
        // the following options mirror the stream load request headers
        public String label;
        public String columnSeparator;
        public String columns;
        public String where;
        public String maxFilterRatio;
        public String partitions;
        public String timeout;
        public String strictMode;
        public String timezone;
        public String execMemLimit;
        public String format;
        public String jsonPaths;
        public String stripOuterArray;
        public String jsonRoot;

        public LoadContext(HttpServletRequest request, String db, String tbl, String user, String passwd, TmpFileMgr.TmpFile file) {
            this.db = db;
            this.tbl = tbl;
            this.user = user;
            this.passwd = passwd;
            this.file = file;
            parseHeader(request);
        }

        // Copy the stream load options from the request headers.
        // The column separator recorded at upload time is the default and can be
        // overridden by a "column_separator" header.
        private void parseHeader(HttpServletRequest request) {
            this.label = request.getHeader("label");
            this.columnSeparator = file.columnSeparator;
            if (!Strings.isNullOrEmpty(request.getHeader("column_separator"))) {
                this.columnSeparator = request.getHeader("column_separator");
            }
            this.columns = request.getHeader("columns");
            this.where = request.getHeader("where");
            this.maxFilterRatio = request.getHeader("max_filter_ratio");
            this.partitions = request.getHeader("partitions");
            this.timeout = request.getHeader("timeout");
            this.strictMode = request.getHeader("strict_mode");
            this.timezone = request.getHeader("timezone");
            this.execMemLimit = request.getHeader("exec_mem_limit");
            this.format = request.getHeader("format");
            this.jsonPaths = request.getHeader("jsonpaths");
            this.stripOuterArray = request.getHeader("strip_outer_array");
            this.jsonRoot = request.getHeader("json_root");
        }
    }
}

View File

@ -0,0 +1,48 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.util;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
 * A simple wrapper of a statement execution result.
 * The payload is a map (e.g. {"meta": [...], "data": [...]}) that is
 * serialized to JSON and returned to the HTTP client.
 */
public class ExecutionResultSet {
    private Map<String, Object> result;

    public ExecutionResultSet(Map<String, Object> result) {
        this.result = result;
    }

    public void setResult(Map<String, Object> result) {
        this.result = result;
    }

    public Map<String, Object> getResult() {
        return result;
    }

    /**
     * @return a fresh, mutable result set with empty "meta" and "data" lists.
     */
    public static ExecutionResultSet emptyResult() {
        // plain JDK collections suffice here; no need for the Guava factories
        Map<String, Object> result = new HashMap<>();
        result.put("meta", new ArrayList<>());
        result.put("data", new ArrayList<>());
        return new ExecutionResultSet(result);
    }
}

View File

@ -0,0 +1,61 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.util;
import com.google.common.base.Strings;
import java.io.BufferedReader;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import static org.springframework.http.HttpHeaders.CONNECTION;
/**
 * Small helpers around HttpServletRequest.
 */
public class HttpUtil {

    /**
     * Whether the client connection should be kept alive.
     * The previous implementation dereferenced the Connection header without a
     * null check and threw NPE whenever the header was absent.
     */
    public static boolean isKeepAlive(HttpServletRequest request) {
        String connection = request.getHeader(CONNECTION);
        if (connection == null) {
            // No Connection header: HTTP/1.1 connections are persistent by default
            return "HTTP/1.1".equalsIgnoreCase(request.getProtocol());
        }
        if ("close".equalsIgnoreCase(connection)) {
            return false;
        }
        // keep the original acceptance conditions: explicit keep-alive, or an
        // empty protocol string
        return "keep-alive".equalsIgnoreCase(connection) || request.getProtocol().equals("");
    }

    /**
     * Whether the request was made over https, judged from the request URL.
     */
    public static boolean isSslEnable(HttpServletRequest request) {
        String url = request.getRequestURL().toString();
        if (!Strings.isNullOrEmpty(url) && url.startsWith("https")) {
            return true;
        }
        return false;
    }

    /**
     * Read the whole request body as a string (best effort: on IOException the
     * part read so far is returned).
     */
    public static String getBody(HttpServletRequest request) {
        StringBuilder data = new StringBuilder();
        try (BufferedReader reader = request.getReader()) {
            String line;
            while ((line = reader.readLine()) != null) {
                // append directly; the old round-trip through line.getBytes("utf-8")
                // decoded with the platform-default charset could corrupt text
                data.append(line);
            }
        } catch (IOException ignored) {
            // best effort: fall through and return what has been read
        }
        return data.toString();
    }
}

View File

@ -0,0 +1,143 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.util;
import org.apache.doris.cluster.ClusterNamespace;
import org.apache.doris.common.Config;
import org.apache.doris.common.ThreadPoolManager;
import org.apache.doris.httpv2.rest.UploadAction;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.google.common.base.Strings;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Type;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
/**
 * Submits an uploaded temp file to the local FE's stream load endpoint
 * asynchronously and returns the parsed stream load result.
 */
public class LoadSubmitter {
    private static final Logger LOG = LogManager.getLogger(LoadSubmitter.class);

    private ThreadPoolExecutor executor = ThreadPoolManager.newDaemonCacheThreadPool(2, "Load submitter", true);

    private static final String STREAM_LOAD_URL_PATTERN = "http://%s:%d/api/%s/%s/_stream_load";

    /**
     * Submit a stream load of the file described by loadContext.
     * @return a future holding the parsed stream load response
     */
    public Future<SubmitResult> submit(UploadAction.LoadContext loadContext) {
        LoadSubmitter.Worker worker = new LoadSubmitter.Worker(loadContext);
        return executor.submit(worker);
    }

    private static class Worker implements Callable<SubmitResult> {

        private UploadAction.LoadContext loadContext;

        public Worker(UploadAction.LoadContext loadContext) {
            this.loadContext = loadContext;
        }

        @Override
        public SubmitResult call() throws Exception {
            // basic auth credentials for the local stream load request
            String auth = String.format("%s:%s", ClusterNamespace.getNameFromFullName(loadContext.user), loadContext.passwd);
            String authEncoding = Base64.getEncoder().encodeToString(auth.getBytes(StandardCharsets.UTF_8));
            String loadUrlStr = String.format(STREAM_LOAD_URL_PATTERN, "127.0.0.1", Config.http_port, loadContext.db, loadContext.tbl);
            URL loadUrl = new URL(loadUrlStr);
            HttpURLConnection conn = (HttpURLConnection) loadUrl.openConnection();
            conn.setRequestMethod("PUT");
            conn.setRequestProperty("Authorization", "Basic " + authEncoding);
            conn.addRequestProperty("Expect", "100-continue");
            conn.addRequestProperty("Content-Type", "text/plain; charset=UTF-8");
            if (!Strings.isNullOrEmpty(loadContext.columns)) {
                conn.addRequestProperty("columns", loadContext.columns);
            }
            if (!Strings.isNullOrEmpty(loadContext.columnSeparator)) {
                conn.addRequestProperty("column_separator", loadContext.columnSeparator);
            }
            if (!Strings.isNullOrEmpty(loadContext.label)) {
                conn.addRequestProperty("label", loadContext.label);
            }

            conn.setDoOutput(true);
            conn.setDoInput(true);

            File loadFile = checkAndGetFile(loadContext.file);
            try (BufferedOutputStream bos = new BufferedOutputStream(conn.getOutputStream());
                    BufferedInputStream bis = new BufferedInputStream(new FileInputStream(loadFile))) {
                int i;
                // InputStream.read() returns -1 only at EOF. The previous
                // "> 0" condition stopped at the first 0x00 byte and silently
                // truncated the uploaded data.
                while ((i = bis.read()) != -1) {
                    bos.write(i);
                }
            }

            int status = conn.getResponseCode();
            String respMsg = conn.getResponseMessage();
            LOG.info("get status: {}, response msg: {}", status, respMsg);

            StringBuilder sb = new StringBuilder();
            // close the response stream, and decode it explicitly as UTF-8
            // instead of the platform default charset
            try (InputStream stream = (InputStream) conn.getContent();
                    BufferedReader br = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) {
                String line;
                while ((line = br.readLine()) != null) {
                    sb.append(line);
                }
            }

            Type type = new TypeToken<SubmitResult>() {
            }.getType();
            SubmitResult result = new Gson().fromJson(sb.toString(), type);
            return result;
        }

        // resolve the temp file's absolute path to a File handle
        private File checkAndGetFile(TmpFileMgr.TmpFile tmpFile) {
            File file = new File(tmpFile.absPath);
            return file;
        }
    }

    /**
     * Fields mirror the JSON keys of the stream load response body.
     */
    public static class SubmitResult {
        public String TxnId;
        public String Label;
        public String Status;
        public String Message;
        public String NumberTotalRows;
        public String NumberLoadedRows;
        public String NumberFilteredRows;
        public String NumberUnselectedRows;
        public String LoadBytes;
        public String LoadTimeMs;
        public String BeginTxnTimeMs;
        public String StreamLoadPutTimeMs;
        public String ReadDataTimeMs;
        public String WriteDataTimeMs;
        public String CommitAndPublishTimeMs;
        public String ErrorURL;
    }
}

View File

@ -0,0 +1,217 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.util;
import org.apache.doris.analysis.DdlStmt;
import org.apache.doris.analysis.ExportStmt;
import org.apache.doris.analysis.InsertStmt;
import org.apache.doris.analysis.QueryStmt;
import org.apache.doris.analysis.ShowStmt;
import org.apache.doris.analysis.SqlParser;
import org.apache.doris.analysis.SqlScanner;
import org.apache.doris.analysis.StatementBase;
import org.apache.doris.common.Config;
import org.apache.doris.common.ThreadPoolManager;
import org.apache.doris.common.util.SqlParserUtils;
import org.apache.doris.qe.ConnectContext;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.io.StringReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
/**
* This is a simple stmt submitter for submitting a statement to the local FE.
* It uses a fixed-size thread pool to receive query requests,
* so it is only suitable for a small number of low-frequency request scenarios.
* Now it support submitting the following type of stmt:
* QueryStmt
* ShowStmt
* InsertStmt
* DdlStmt
* ExportStmt
*/
/**
 * This is a simple stmt submitter for submitting a statement to the local FE.
 * It uses a fixed-size thread pool to receive query requests,
 * so it is only suitable for a small number of low-frequency request scenarios.
 * Now it support submitting the following type of stmt:
 *      QueryStmt
 *      ShowStmt
 *      InsertStmt
 *      DdlStmt
 *      ExportStmt
 */
public class StatementSubmitter {
    private static final Logger LOG = LogManager.getLogger(StatementSubmitter.class);

    private static final String TYPE_RESULT_SET = "result_set";
    private static final String TYPE_EXEC_STATUS = "exec_status";

    private static final String JDBC_DRIVER = "com.mysql.jdbc.Driver";
    private static final String DB_URL_PATTERN = "jdbc:mysql://127.0.0.1:%d/%s";

    private ThreadPoolExecutor executor = ThreadPoolManager.newDaemonCacheThreadPool(2, "SQL submitter", true);

    /**
     * Submit a statement for asynchronous execution via a local JDBC connection.
     */
    public Future<ExecutionResultSet> submit(StmtContext queryCtx) {
        Worker worker = new Worker(ConnectContext.get(), queryCtx);
        return executor.submit(worker);
    }

    private static class Worker implements Callable<ExecutionResultSet> {

        private ConnectContext ctx;
        private StmtContext queryCtx;

        public Worker(ConnectContext ctx, StmtContext queryCtx) {
            this.ctx = ctx;
            this.queryCtx = queryCtx;
        }

        @Override
        public ExecutionResultSet call() throws Exception {
            StatementBase stmtBase = analyzeStmt(queryCtx.stmt);
            Connection conn = null;
            Statement stmt = null;
            String dbUrl = String.format(DB_URL_PATTERN, Config.query_port, ctx.getDatabase());
            try {
                Class.forName(JDBC_DRIVER);
                conn = DriverManager.getConnection(dbUrl, queryCtx.user, queryCtx.passwd);
                if (stmtBase instanceof QueryStmt || stmtBase instanceof ShowStmt) {
                    // BUG FIX: the old code obtained a PreparedStatement via
                    // conn.prepareStatement(sql, ...) and then invoked
                    // stmt.executeQuery(sql) on it. JDBC forbids calling the
                    // String-taking execute methods on a PreparedStatement
                    // (they must throw SQLException), so queries would fail.
                    // Use a plain forward-only, read-only Statement instead.
                    stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
                    // try-with-resources so the ResultSet is closed even if
                    // generateResultSet throws
                    try (ResultSet rs = stmt.executeQuery(queryCtx.stmt)) {
                        return generateResultSet(rs);
                    }
                } else if (stmtBase instanceof InsertStmt || stmtBase instanceof DdlStmt || stmtBase instanceof ExportStmt) {
                    stmt = conn.createStatement();
                    stmt.execute(queryCtx.stmt);
                    ExecutionResultSet resultSet = generateExecStatus();
                    return resultSet;
                } else {
                    throw new Exception("Unsupported statement type");
                }
            } finally {
                try {
                    if (stmt != null) {
                        stmt.close();
                    }
                } catch (SQLException se2) {
                    LOG.warn("failed to close stmt", se2);
                }
                try {
                    if (conn != null) conn.close();
                } catch (SQLException se) {
                    LOG.warn("failed to close connection", se);
                }
            }
        }

        /**
         * Result json sample:
         * {
         *  "type": "result_set",
         *  "data": [
         *      [1],
         *      [2]
         *  ],
         *  "meta": [{
         *      "name": "k1",
         *      "type": "INT"
         *  }],
         *  "status": {}
         * }
         */
        private ExecutionResultSet generateResultSet(ResultSet rs) throws SQLException {
            Map<String, Object> result = Maps.newHashMap();
            result.put("type", TYPE_RESULT_SET);
            if (rs == null) {
                return new ExecutionResultSet(result);
            }
            ResultSetMetaData metaData = rs.getMetaData();
            int colNum = metaData.getColumnCount();
            // 1. metadata
            List<Map<String, String>> metaFields = Lists.newArrayList();
            // index start from 1
            for (int i = 1; i <= colNum; ++i) {
                Map<String, String> field = Maps.newHashMap();
                field.put("name", metaData.getColumnName(i));
                field.put("type", metaData.getColumnTypeName(i));
                metaFields.add(field);
            }
            // 2. data, truncated at queryCtx.limit rows
            List<List<Object>> rows = Lists.newArrayList();
            long rowCount = 0;
            while (rs.next() && rowCount < queryCtx.limit) {
                List<Object> row = Lists.newArrayListWithCapacity(colNum);
                // index start from 1
                for (int i = 1; i <= colNum; ++i) {
                    row.add(rs.getObject(i));
                }
                rows.add(row);
                rowCount++;
            }
            result.put("meta", metaFields);
            result.put("data", rows);
            return new ExecutionResultSet(result);
        }

        /**
         * Result json sample:
         * {
         *  "type": "exec_status",
         *  "status": {}
         * }
         */
        private ExecutionResultSet generateExecStatus() throws SQLException {
            Map<String, Object> result = Maps.newHashMap();
            result.put("type", TYPE_EXEC_STATUS);
            result.put("status", Maps.newHashMap());
            return new ExecutionResultSet(result);
        }

        // Parse the statement text with Doris' SQL parser so we can dispatch on
        // the statement type before sending it over JDBC.
        private StatementBase analyzeStmt(String stmtStr) throws Exception {
            SqlParser parser = new SqlParser(new SqlScanner(new StringReader(stmtStr)));
            try {
                return SqlParserUtils.getFirstStmt(parser);
            } catch (Exception e) {
                throw new Exception("error happens when parsing stmt: " + e.getMessage());
            }
        }
    }

    /**
     * Context of a single statement submission.
     */
    public static class StmtContext {
        public String stmt;
        public String user;
        public String passwd;
        public long limit; // limit the number of rows returned by the stmt

        public StmtContext(String stmt, String user, String passwd, long limit) {
            this.stmt = stmt;
            this.user = user;
            this.passwd = passwd;
            this.limit = limit;
        }
    }
}

View File

@ -0,0 +1,306 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.httpv2.util;
import org.apache.doris.common.util.Util;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.web.multipart.MultipartFile;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
/**
* Manager the file uploaded.
* This file manager is currently only used to manage files
* uploaded through the Upload RESTFul API.
* And limit the number and size of the maximum upload file.
* It can also browse or delete files through the RESTFul API.
*/
public class TmpFileMgr {
    public static final Logger LOG = LogManager.getLogger(TmpFileMgr.class);

    // Quota for the sum of all currently-stored files, and per-file cap.
    private static final long MAX_TOTAL_FILE_SIZE_BYTES = 1 * 1024 * 1024 * 1024L; // 1GB
    private static final long MAX_TOTAL_FILE_NUM = 100;
    public static final long MAX_SINGLE_FILE_SIZE = 100 * 1024 * 1024L; // 100MB

    private static final String UPLOAD_DIR = "_doris_upload";

    // Monotonic id for uploaded files; uuid guards against id guessing.
    private AtomicLong fileIdGenerator = new AtomicLong(0);

    private String rootDir;
    private Map<Long, TmpFile> fileMap = Maps.newConcurrentMap();
    // Sum of sizes of files currently in fileMap.
    // Only mutated inside synchronized methods (upload/deleteFile) so the
    // quota check in upload() sees a consistent value.
    private long totalFileSize = 0;

    public TmpFileMgr(String dir) {
        this.rootDir = dir + "/" + UPLOAD_DIR;
        init();
    }

    // Create the upload dir, wiping any leftover content from a previous run.
    private void init() {
        File root = new File(rootDir);
        if (!root.exists()) {
            root.mkdirs();
        } else if (!root.isDirectory()) {
            throw new IllegalStateException("Path " + rootDir + " is not directory");
        }

        // delete all files under this dir at startup.
        // This means that all uploaded files will be lost after FE restarts.
        // This is just for simplicity.
        Util.deleteDirectory(root);
        root.mkdirs();
    }

    /**
     * Simply used `synchronized` to allow only one user upload file at one time.
     * So that we can easily control the number of files and total size of files.
     *
     * @param uploadFile the multipart file plus its column separator
     * @return the saved {@link TmpFile}
     * @throws TmpFileException if a size/number limit is exceeded or saving fails
     */
    public synchronized TmpFile upload(UploadFile uploadFile) throws TmpFileException {
        if (uploadFile.file.getSize() > MAX_SINGLE_FILE_SIZE) {
            throw new TmpFileException("File size " + uploadFile.file.getSize() + " exceed limit " + MAX_SINGLE_FILE_SIZE);
        }

        if (totalFileSize + uploadFile.file.getSize() > MAX_TOTAL_FILE_SIZE_BYTES) {
            throw new TmpFileException("Total file size will exceed limit " + MAX_TOTAL_FILE_SIZE_BYTES);
        }

        // ">=" keeps at most MAX_TOTAL_FILE_NUM files; the previous ">" let one
        // extra file through before rejecting.
        if (fileMap.size() >= MAX_TOTAL_FILE_NUM) {
            throw new TmpFileException("Number of temp file " + fileMap.size() + " exceed limit " + MAX_TOTAL_FILE_NUM);
        }

        long fileId = fileIdGenerator.incrementAndGet();
        String fileUUID = UUID.randomUUID().toString();
        TmpFile tmpFile = new TmpFile(fileId, fileUUID, uploadFile.file.getOriginalFilename(),
                uploadFile.file.getSize(), uploadFile.columnSeparator);
        try {
            tmpFile.save(uploadFile.file);
        } catch (IOException e) {
            // keep the root cause attached so the full stack trace is preserved
            throw new TmpFileException("Failed to upload file. Reason: " + e.getMessage(), e);
        }

        fileMap.put(tmpFile.id, tmpFile);
        totalFileSize += uploadFile.file.getSize();
        return tmpFile;
    }

    /**
     * Look up a file by id; the uuid must also match to guard against id guessing.
     *
     * @throws TmpFileException if no file with both matching id and uuid exists
     */
    public TmpFile getFile(long id, String uuid) throws TmpFileException {
        TmpFile tmpFile = fileMap.get(id);
        if (tmpFile == null || !tmpFile.uuid.equals(uuid)) {
            throw new TmpFileException("File with [" + id + "-" + uuid + "] does not exist");
        }
        return tmpFile;
    }

    // A snapshot of brief descriptions of all currently-stored files.
    public List<TmpFileBrief> listFiles() {
        return fileMap.values().stream().map(t -> new TmpFileBrief(t)).collect(Collectors.toList());
    }

    /**
     * Delete the specified file (id and uuid must both match) and remove it from fileMap.
     * Synchronized so that totalFileSize stays consistent with upload().
     *
     * @param fileId id of the file to delete
     * @param fileUUID uuid of the file to delete
     */
    public synchronized void deleteFile(Long fileId, String fileUUID) {
        Iterator<Map.Entry<Long, TmpFile>> iterator = fileMap.entrySet().iterator();
        while (iterator.hasNext()) {
            Map.Entry<Long, TmpFile> entry = iterator.next();
            if (entry.getValue().id == fileId && entry.getValue().uuid.equals(fileUUID)) {
                entry.getValue().delete();
                // Reclaim the quota. Without this, totalFileSize only ever grows
                // and uploads would eventually be rejected forever.
                totalFileSize -= entry.getValue().fileSize;
                iterator.remove();
            }
        }
    }

    // One uploaded file on disk. Non-static on purpose: save() needs the
    // enclosing manager's rootDir.
    public class TmpFile {
        public final long id;
        public final String uuid;
        public final String originFileName;
        public final long fileSize;
        public String columnSeparator;
        public String absPath;
        // First MAX_PREVIEW_LINES lines split by columnSeparator; filled by setPreview().
        public List<List<String>> lines = null;
        public int maxColNum = 0;

        private static final int MAX_PREVIEW_LINES = 10;

        public TmpFile(long id, String uuid, String originFileName, long fileSize, String columnSeparator) {
            this.id = id;
            this.uuid = uuid;
            this.originFileName = originFileName;
            this.fileSize = fileSize;
            this.columnSeparator = columnSeparator;
        }

        // Persist the multipart content under rootDir, named by uuid.
        // On failure the partially-written file is removed before rethrowing.
        public void save(MultipartFile file) throws IOException {
            File dest = new File(Joiner.on("/").join(rootDir, uuid));
            boolean uploadSucceed = false;
            try {
                file.transferTo(dest);
                this.absPath = dest.getAbsolutePath();
                uploadSucceed = true;
                LOG.info("upload file {} succeed at {}", this, dest.getAbsolutePath());
            } catch (IOException e) {
                LOG.warn("failed to upload file {}, dest: {}", this, dest.getAbsolutePath(), e);
                throw e;
            } finally {
                if (!uploadSucceed) {
                    dest.delete();
                }
            }
        }

        // Load up to MAX_PREVIEW_LINES lines from disk into `lines`,
        // tracking the widest row in maxColNum.
        public void setPreview() throws IOException {
            lines = Lists.newArrayList();
            // Read explicitly as UTF-8 instead of the platform default charset.
            try (BufferedReader bf = Files.newBufferedReader(Paths.get(absPath), StandardCharsets.UTF_8)) {
                String str;
                while ((str = bf.readLine()) != null) {
                    // String.split() takes a regex, so the user-supplied separator
                    // must be quoted; otherwise "|" or "." would split everywhere.
                    String[] cols = str.split(Pattern.quote(columnSeparator));
                    lines.add(Lists.newArrayList(cols));
                    if (cols.length > maxColNum) {
                        maxColNum = cols.length;
                    }
                    if (lines.size() >= MAX_PREVIEW_LINES) {
                        break;
                    }
                }
            }
        }

        // make a copy without lines and maxColNum.
        // so that can call `setPreview` and will not affect other instance
        public TmpFile copy() {
            TmpFile copiedFile = new TmpFile(this.id, this.uuid, this.originFileName, this.fileSize, this.columnSeparator);
            copiedFile.absPath = this.absPath;
            return copiedFile;
        }

        // Remove the backing file from disk (does not touch fileMap).
        public void delete() {
            File file = new File(absPath);
            file.delete();
            LOG.info("delete tmp file: {}", this);
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append("[id=").append(id).append(", uuid=").append(uuid).append(", origin name=").append(originFileName)
                    .append(", size=").append(fileSize).append("]");
            return sb.toString();
        }
    }

    // a brief of TmpFile.
    // TODO(cmy): it can be removed by using Lombok's annotation in TmpFile class
    public static class TmpFileBrief {
        public long id;
        public String uuid;
        public String originFileName;
        public long fileSize;
        public String columnSeparator;

        public TmpFileBrief(TmpFile tmpFile) {
            this.id = tmpFile.id;
            this.uuid = tmpFile.uuid;
            this.originFileName = tmpFile.originFileName;
            this.fileSize = tmpFile.fileSize;
            this.columnSeparator = tmpFile.columnSeparator;
        }

        public long getId() {
            return id;
        }

        public void setId(long id) {
            this.id = id;
        }

        public String getUuid() {
            return uuid;
        }

        public void setUuid(String uuid) {
            this.uuid = uuid;
        }

        public String getOriginFileName() {
            return originFileName;
        }

        public void setOriginFileName(String originFileName) {
            this.originFileName = originFileName;
        }

        public long getFileSize() {
            return fileSize;
        }

        public void setFileSize(long fileSize) {
            this.fileSize = fileSize;
        }

        public String getColumnSeparator() {
            return columnSeparator;
        }

        public void setColumnSeparator(String columnSeparator) {
            this.columnSeparator = columnSeparator;
        }
    }

    // Request payload for upload(): the multipart file plus its column separator.
    public static class UploadFile {
        public MultipartFile file;
        public String columnSeparator;

        public UploadFile(MultipartFile file, String columnSeparator) {
            this.file = file;
            this.columnSeparator = columnSeparator;
        }
    }

    // Checked exception for all user-visible upload/lookup failures.
    public static class TmpFileException extends Exception {
        public TmpFileException(String msg) {
            super(msg);
        }

        public TmpFileException(String msg, Throwable t) {
            super(msg, t);
        }
    }
}