from pyspark import SparkContext, SparkConf
import sys

if __name__ == '__main__':
    # Spark word-count job: read a text file, split each line on single
    # spaces, and print every distinct token with its occurrence count.
    #
    # Usage: wordcount <input>
    #   <input> — path/URI readable by sc.textFile (local file, HDFS, ...).
    if len(sys.argv) != 2:
        print("Usage: wordcount <input>", file=sys.stderr)
        sys.exit(-1)

    conf = SparkConf()
    sc = SparkContext(conf=conf)

    # Classic map/reduce pipeline: line -> words -> (word, 1) -> summed counts.
    counts = sc.textFile(sys.argv[1]) \
        .flatMap(lambda line: line.split(" ")) \
        .map(lambda x: (x, 1)) \
        .reduceByKey(lambda a, b: a + b)

    # collect() pulls all results to the driver — fine for a demo,
    # unsuitable for very large vocabularies.
    output = counts.collect()
    for (word, count) in output:
        print("%s: %i" % (word, count))

    sc.stop()
from pyspark import SparkContext, SparkConf
import sys

if __name__ == '__main__':
    # Spark average-age job: each input line is expected to have the age as
    # its second space-separated field (e.g. "<id> <age>"); compute and print
    # the total, the record count, and the mean age.
    #
    # Usage: avg <input>
    if len(sys.argv) != 2:
        print("Usage: avg <input>", file=sys.stderr)
        sys.exit(-1)

    conf = SparkConf()
    sc = SparkContext(conf=conf)

    # Extract the second whitespace-separated token of every line.
    # NOTE(review): lines with fewer than two fields raise IndexError at
    # execution time — assumes well-formed input.
    ageData = sc.textFile(sys.argv[1]).map(lambda line: line.split(" ")[1])

    # ageData is evaluated twice (reduce + count); caching would avoid the
    # second file scan, but is omitted to keep the demo minimal.
    totalAge = ageData.map(lambda x: int(x)).reduce(lambda a, b: a + b)
    count = ageData.count()
    # ZeroDivisionError on an empty input file — acceptable for a demo job.
    avgAge = totalAge / count

    print("totalAge:%s" % totalAge)
    print("count:%s" % count)
    print("avgAge:%s" % avgAge)

    sc.stop()