Oozie coordinator workflow and Hive action for loading tweets
This adds an Oozie coordinator workflow which executes every hour and loads
the last hour of Twitter data into Hive.
Jon Natkins committed Aug 30, 2012
1 parent ea95716 commit 294e9af
Showing 4 changed files with 139 additions and 0 deletions.
56 changes: 56 additions & 0 deletions oozie-workflows/coord-app.xml
@@ -0,0 +1,56 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<coordinator-app name="load-tweets-coord" frequency="${coord:hours(1)}"
start="${jobStart}" end="${jobEnd}"
timezone="UTC"
xmlns="uri:oozie:coordinator:0.1">
<datasets>
<dataset name="tweets" frequency="${coord:hours(1)}"
initial-instance="${initialDataset}" timezone="UTC">
<uri-template>hdfs://hadoop1:8020/user/flume/tweets/${YEAR}/${MONTH}/${DAY}/${HOUR}</uri-template>
<done-flag></done-flag>
</dataset>
</datasets>
<input-events>
<data-in name="input" dataset="tweets">
<!-- The integer value here should be the offset of your time zone from GMT;
     for Pacific Standard Time (PST), this value is -8. See the worked example
     just below this element. -->
<instance>${coord:current(-8)}</instance>
</data-in>
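<!-- Illustrative worked example, assuming the hourly dataset frequency and
     initial-instance above: for a coordinator action whose nominal time is
     2012-08-23T17:00Z, coord:current(-8) resolves to the 2012-08-23T09:00Z
     instance, i.e. hdfs://hadoop1:8020/user/flume/tweets/2012/08/23/09,
     which is the directory Flume writes at 09:00 local time when the Flume
     agent's clock is 8 hours behind UTC. -->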
<data-in name="readyIndicator" dataset="tweets">
<!-- I've done something here that is a little bit of a hack. Since Flume
     doesn't have a good mechanism for notifying an application when it has
     rolled over to a new directory, we can just use the next directory as an
     input event, which instructs Oozie not to kick off a coordinator
     action until the next dataset starts to become available. -->
<instance>${coord:current(-7)}</instance>
</data-in>
</input-events>
<action>
<workflow>
<app-path>${workflowRoot}/hive-action.xml</app-path>
<configuration>
<property>
<name>wfInput</name>
<value>${coord:dataIn('input')}</value>
</property>
</configuration>
</workflow>
</action>
</coordinator-app>
49 changes: 49 additions & 0 deletions oozie-workflows/hive-action.xml
@@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<workflow-app xmlns="uri:oozie:workflow:0.2" name="hive-load-tweets-wf">
<start to="hive-load-tweets"/>

<action name="hive-load-tweets">
<hive xmlns="uri:oozie:hive-action:0.2">
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<prepare>
<delete path="${workflowRoot}/output-data/hive"/>
<mkdir path="${workflowRoot}/output-data"/>
</prepare>
<job-xml>${workflowRoot}/hive-site.xml</job-xml>
<configuration>
<property>
<name>oozie.hive.defaults</name>
<value>${workflowRoot}/hive-site.xml</value>
</property>
</configuration>
<script>load_tweets.q</script>
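<!-- JSON_SERDE and WFINPUT below are substituted into load_tweets.q at run
     time; wfInput itself is supplied by the coordinator via
     coord:dataIn('input') in coord-app.xml. -->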
<param>JSON_SERDE=${workflowRoot}/lib/custom-serdes-1.0-SNAPSHOT.jar</param>
<param>WFINPUT=${wfInput}</param>
</hive>
<ok to="end"/>
<error to="fail"/>
</action>

<kill name="fail">
<message>Hive failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
32 changes: 32 additions & 0 deletions oozie-workflows/job.properties
@@ -0,0 +1,32 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
nameNode=hdfs://hadoop1:8020
jobTracker=hadoop1:8021
workflowRoot=${nameNode}/user/${user.name}/oozie-workflows

# jobStart and jobEnd must be in UTC, because Oozie does not yet support
# custom timezones
jobStart=2012-08-23T17:00Z
jobEnd=2013-12-12T23:00Z

# This should be set to an hour boundary. In this case, it is set to 8 hours
# before the jobStart, since PST is GMT-8
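# (so: 2012-08-23T17:00Z minus 8 hours = 2012-08-23T09:00Z)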
initialDataset=2012-08-23T09:00Z

oozie.use.system.libpath=true
oozie.coord.application.path=${nameNode}/user/${user.name}/oozie-workflows/coord-app.xml
2 changes: 2 additions & 0 deletions oozie-workflows/load_tweets.q
@@ -0,0 +1,2 @@
ADD JAR ${JSON_SERDE};
LOAD DATA INPATH '${WFINPUT}/*' INTO TABLE tweets;
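
For reference, load_tweets.q assumes a tweets table already exists and is backed by the JSON SerDe shipped in the jar added via ADD JAR. A minimal sketch of what such a table definition might look like; the column list and SerDe class name are illustrative assumptions, since the real DDL is defined elsewhere in the project and is not part of this commit:

-- Illustrative sketch only; the column list and SerDe class name are assumptions.
-- The SerDe jar must be on the classpath (hence the ADD JAR above) when the
-- table is created and queried.
CREATE TABLE IF NOT EXISTS tweets (
  id BIGINT,
  created_at STRING,
  text STRING
)
ROW FORMAT SERDE 'com.cloudera.hive.serde.JSONSerDe';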
