Extract lines 1–10 from input.txt and write them to output.txt:
# Print only lines 1-10 of input.txt; the trailing "10q" makes sed quit
# after line 10 so the rest of a large file is never read.
sed -n -e '1,10p;10q' input.txt > output.txt
Extract lines 1–10 from input.txt and write them to output.txt:
# Same result in a different form: head emits the first 10 lines directly.
head -n 10 input.txt > output.txt
# Hadoop endpoints the Oozie workflow resolves at runtime
nameNode=hdfs://localhost:8020
jobTracker=localhost:8050
queueName=default
# HDFS path of the workflow definition (replace <Hdfs Path> with the real path)
oozie.wf.application.path=/<Hdfs Path>/sampleworkflow.xml
# Pull in the Oozie system share lib so the shell action's jars resolve
oozie.use.system.libpath=true
oozie.libpath=<shared lib path on Hdfs>
<!-- Oozie workflow with a single shell action that runs sample.sh -->
<workflow-app name="shell-wf" xmlns="uri:oozie:workflow:0.4">
    <start to="shell-node"/>
    <action name="shell-node">
        <shell xmlns="uri:oozie:shell-action:0.2">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <configuration>
                <property>
                    <name>mapred.job.queue.name</name>
                    <value>${queueName}</value>
                </property>
            </configuration>
            <exec>sample.sh</exec>
            <!-- HDFS location of the script; Oozie ships it to the task's working dir -->
            <file>/<Hdfs Path of the shell script>/sample.sh</file>
        </shell>
        <ok to="end"/>
        <error to="fail"/>
    </action>
    <kill name="fail">
        <message>Shell action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
    <end name="end"/>
</workflow-app>
1 | oozie job -oozie http://localhost:11000/oozie -config job.properties -run
|
1 | container.connectToServer(endpoint, clientConfig, new URI("wss://hostname:port/demo")); |
1 2 3 4 5 6 7 8 9 10 11 | ClientEndpointConfig.Configurator configurator = new ClientEndpointConfig.Configurator() { public void beforeRequest(Map<String, List<String>> headers) { String credentials = "username:password"; headers.put("Authorization", Arrays.asList("Basic " + new BASE64Encoder().encode(credentials.getBytes()))); System.out.println("Header set successfully"); } }; ClientEndpointConfig clientConfig = ClientEndpointConfig.Builder.create() .configurator(configurator) .build(); |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 | Endpoint endpoint = new Endpoint() { @Override public void onOpen(Session session, EndpointConfig config) { session.addMessageHandler(new MessageHandler.Whole<String>() { @Override public void onMessage(String content) { System.out.println("Received message: "+content); } }); try { System.out.println("Sending message to endpoint: " + msg); System.out.println("Session Id:: "+session.getId()); session.getBasicRemote().sendText(msg); } catch (Exception e) { e.printStackTrace(); } } }; |
# Install the RJDBC package (bridges R to any JDBC driver, incl. Cassandra)
install.packages("RJDBC")
|
# Check whether the Thrift RPC server is currently running on this node
nodetool statusthrift
# Enable the Thrift RPC server so JDBC clients can connect (default port 9160)
nodetool enablethrift
# Connect to Cassandra over JDBC from R and read the emp table.
library(RJDBC)

# Load the Cassandra JDBC driver class plus every jar in the DSE lib dir.
# Use TRUE rather than T: T is an ordinary variable in R and can be
# reassigned, silently breaking the call.
cassdrv <- JDBC("org.apache.cassandra.cql.jdbc.CassandraDriver",
                list.files("/usr/share/dse/cassandra/lib/",
                           pattern = "jar$", full.names = TRUE))

# CQL-over-Thrift JDBC endpoint: host localhost, port 9160, keyspace test
casscon <- dbConnect(cassdrv, "jdbc:cassandra://localhost:9160/test")

# Materialise the whole result set into a data.frame
res <- dbGetQuery(casscon, "select * from emp")
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 | public class RecordAccmulator implements AccumulatorParam<Map<String, String>> { private static final long serialVersionUID = 1L; @Override public Map<String, String> addInPlace(Map<String, String> arg0, Map<String, String> arg1) { Map<String, String> map = new HashMap<>(); map.putAll(arg0); map.putAll(arg1); return map; } @Override public Map<String, String> zero(Map<String, String> arg0) { return new HashMap<>(); } @Override public Map<String, String> addAccumulator(Map<String, String> arg0, Map<String, String> arg1) { return addInPlace(arg0, arg1); } } |
// Spark driver: counts the records in inputfile.txt and uses a custom map
// accumulator to collect every key whose value is longer than 10 characters.
JavaSparkContext sc = SparkUtils.createSparkContext(MyTest.class.getName(), "local[*]");
SQLContext hiveContext = SparkUtils.getSQLContext(sc);

JavaRDD<String> file = sc.textFile("inputfile.txt");
logger.info("File Record Count:: " + file.count());

// Accumulator whose merge semantics come from RecordAccmulator; declared
// final so it can be captured by the anonymous PairFunction below.
final Accumulator<Map<String, String>> accm =
        sc.accumulator(new HashMap<String, String>(), new RecordAccmulator());

JavaPairRDD<String, String> filePair = file.mapToPair(new PairFunction<String, String, String>() {

    private static final long serialVersionUID = 1L;

    @Override
    public Tuple2<String, String> call(String t) throws Exception {
        // Each record is "key:value"; split on the first colon(s).
        String[] str = StringUtils.split(t, ":");
        if (str[1].length() > 10) {
            // Typed map instead of the original raw `Map` (removes the
            // unchecked-assignment warning and accidental heterogeneity).
            Map<String, String> map = new HashMap<>();
            map.put(str[0], str[1]);
            accm.add(map);
        }
        return new Tuple2<String, String>(str[0], str[1]);
    }
});

logger.info("Pair Count:: " + filePair.count());
// Accumulator value is only dependable on the driver after an action
// (count above) has forced the RDD to evaluate.
logger.info("Accumulator Values:: " + accm.value());