2017-01-02 255 views
-3
-- Flattens a Cloud Datastore export into long format: one row per
-- (timestamp, variable, value) reading, with the entity key path parsed
-- into its components.
-- NOTE: this is BigQuery *legacy* SQL ([bracketed] table reference,
-- STRFTIME_UTC_USEC) — not standard SQL.
SELECT 
    -- Render the microsecond-precision timestamp as a readable UTC string.
    STRFTIME_UTC_USEC(TimeStamp,"%Y-%m-%d %H:%M:%S") AS TimeStamp, 
    Value.provided, 
    __key__.app AS ProjectID, 
    -- The Datastore key path is presumably a comma-separated list of quoted
    -- components ("hostname", "<v>", "machine", "<v>", ...); each value is
    -- pulled out with a non-greedy regex keyed on the preceding component name.
    REGEXP_EXTRACT(__key__.path, r'"hostname"[, ]*"(.*?)"') AS hostname, 
    REGEXP_EXTRACT(__key__.path, r'"machine"[, ]*"(.*?)"') AS machine, 
    REGEXP_EXTRACT(__key__.path, r'"variable"[, ]*"(.*?)"') AS variable, 
    -- The reading lives in one of two typed fields (value.integer or
    -- value.boolean); value.provided says which one, and the result is
    -- normalised to STRING so a single output column can hold either.
    IF(value.provided = 'integer', CAST(value.integer AS STRING),    
    CAST(value.boolean AS STRING)) AS value 
FROM 
    [spark-test-project-152415:spark_machine_learning.spark_12272016] 
ORDER BY 
    TimeStamp 
LIMIT 100000 

以上查询将提取如附图所示的数据集。我需要把 variable 列转置(pivot)成多个列,每个新列中填入对应的 value。我认为这需要用子查询来实现。我该如何入手?(问题标题:将一列拆分为多列)

(附图:当前查询的输出截图)

预期输出:

预期输出见上文链接的截图。我尝试过的 PIVOT 查询如下:

-- NOTE(review): this is the asker's non-working attempt, kept verbatim.
-- It applies SQL Server's PIVOT operator and [bracket] column quoting to a
-- BigQuery legacy-SQL table reference; BigQuery legacy SQL has no PIVOT
-- operator, so this statement cannot run as written. The answer below
-- achieves the pivot with conditional aggregation instead.
SELECT * FROM (SELECT #Timestamp, STRFTIME_UTC_USEC(TimeStamp,"%Y-%m-%d %H:%M:%S") AS [TimeStamp], Value.provided, __key__.app AS ProjectID, REGEXP_EXTRACT(__key__.path, r'"hostname"[, ]*"(.*?)"') AS [hostname], REGEXP_EXTRACT(__key__.path, r'"machine"[, ]*"(.*?)"') AS [machine], REGEXP_EXTRACT(__key__.path, r'"variable"[, ]*"(.*?)"') AS [variable], IF(value.provided = 'integer', CAST(value.integer AS STRING), CAST(value.boolean AS STRING)) AS [value] FROM [spark-test-project-152415:spark_machine_learning.spark_12272016] ORDER BY TimeStamp) AS SourceTable PIVOT ([value] FOR [variable] IN ([Counter_Strokes_No_Reset], [Press_State_Code], [Press_Operator_1], [Press_Stop_Time_Limit], [Counter_Good_Parts_No_Reset], [Press_Error_Reason_Code], [Counter_Scrap_No_Reset], [Production_Tool_Number], [Press_Stop_Time_Actual], [Production_Good_Parts_Preset], [Press_Shaft_Speed], [Production_Part_Number], [Press_Total_Tonnage], [Production_Job_Number])) AS PivotTable

+0

从我了解的图像显示您的电流输出。你能提供预期产出的例子! –

+0

https://i.stack.imgur.com/Xa1hg.png –

+0

@Mikhail,我想要得到像上面链接 –

回答

1

我怎样才能得到这个开始?

尝试以下,可能会给你一个想法

-- Pivots the long-format readings into wide format: one column per known
-- variable name, one row per (timestamp, provided-type, project, host,
-- machine) group. BigQuery *legacy* SQL.
--
-- Fixes relative to the posted answer:
--  * Column aliases lose the [brackets] — in legacy BigQuery SQL, square
--    brackets denote *table* references, not identifier quoting.
--  * The outer query now references the subquery's exported alias `value`;
--    `value.integer` is a field of the source record and is not visible
--    outside the subquery.
--  * The inner STRFTIME_UTC_USEC call reads the `TimeStamp` column again
--    instead of the bare `TIMESTAMP` keyword (a legacy-SQL function name).
SELECT 
    TimeStamp, 
    Value_Provided, 
    ProjectID, 
    hostname, 
    machine, 
    -- One conditional aggregate per variable: non-matching rows contribute
    -- NULL (no ELSE branch), which SUM ignores, so each cell holds the
    -- reading(s) for exactly that variable within the group.
    SUM(CASE WHEN variable = 'Counter_Strokes_No_Reset' THEN value END) AS Counter_Strokes_No_Reset, 
    SUM(CASE WHEN variable = 'Press_State_Code' THEN value END) AS Press_State_Code, 
    SUM(CASE WHEN variable = 'Press_Operator_1' THEN value END) AS Press_Operator_1, 
    SUM(CASE WHEN variable = 'Press_Stop_Time_Limit' THEN value END) AS Press_Stop_Time_Limit, 
    SUM(CASE WHEN variable = 'Counter_Good_Parts_No_Reset' THEN value END) AS Counter_Good_Parts_No_Reset, 
    SUM(CASE WHEN variable = 'Press_Error_Reason_Code' THEN value END) AS Press_Error_Reason_Code, 
    SUM(CASE WHEN variable = 'Counter_Scrap_No_Reset' THEN value END) AS Counter_Scrap_No_Reset, 
    SUM(CASE WHEN variable = 'Production_Tool_Number' THEN value END) AS Production_Tool_Number, 
    SUM(CASE WHEN variable = 'Press_Stop_Time_Actual' THEN value END) AS Press_Stop_Time_Actual, 
    SUM(CASE WHEN variable = 'Production_Good_Parts_Preset' THEN value END) AS Production_Good_Parts_Preset, 
    SUM(CASE WHEN variable = 'Press_Shaft_Speed' THEN value END) AS Press_Shaft_Speed, 
    SUM(CASE WHEN variable = 'Production_Part_Number' THEN value END) AS Production_Part_Number, 
    SUM(CASE WHEN variable = 'Press_Total_Tonnage' THEN value END) AS Press_Total_Tonnage, 
    SUM(CASE WHEN variable = 'Production_Job_Number' THEN value END) AS Production_Job_Number 
FROM (
    -- Long-format source: one row per reading, value normalised to INTEGER
    -- (booleans cast to 0/1) so every pivoted column is summable.
    SELECT 
    STRFTIME_UTC_USEC(TimeStamp,"%Y-%m-%d %H:%M:%S") AS TimeStamp, 
    Value.provided AS Value_Provided, 
    __key__.app AS ProjectID, 
    REGEXP_EXTRACT(__key__.path, r'"hostname"[, ]*"(.*?)"') AS hostname, 
    REGEXP_EXTRACT(__key__.path, r'"machine"[, ]*"(.*?)"') AS machine, 
    REGEXP_EXTRACT(__key__.path, r'"variable"[, ]*"(.*?)"') AS variable, 
    IF(value.provided = 'integer', CAST(value.integer AS INTEGER), CAST(value.boolean AS INTEGER)) AS value 
    FROM [spark-test-project-152415:spark_machine_learning.spark_12272016] 
) 
GROUP BY TimeStamp, Value_Provided, ProjectID, hostname, machine 
ORDER BY TimeStamp 
+0

谢谢@Mikhail能够获得所需的输出 –