全连接神经网络
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125import matplotlib.pyplot as pltimport numpy as npimport torchfrom torchvision.datasets import mnistimport torchvision.transforms as transformsfrom torch.utils.data import DataLoaderimport torch.nn.functional as Fimport torch. ...
master
slave1
slave2
NameNode
DataNode
DataNode
DataNode
ResourceManager
NodeManager
NodeManager
NodeManager
SecondaryNameNode
core-site.xml
<property>
    <name>fs.defaultFS</name>
    <value>hdfs://master:8020</value>
</property>
<property>
    <name>hadoop.tmp.dir</name>
    <value>/opt/module/hadoop/data</value>
</property>
hdfs-site.xml
12345678<property> <name>dfs.namenode.http-address< ...
大数据集群配置

一、组件版本
Name
Version
Centos
7.9
Hadoop
2.7.7
Spark
2.1.1
Flink
1.10.2
Flume
1.7.0
Hive
2.3.4
Zookeeper
3.4.10
Sqoop
1.4.7
二、JDK

1、解压

[root@master software]# tar -zxvf jdk-8u161-linux-x64.tar.gz -C /opt/module/
# 修改目录名称
[root@master software]# cd ../module/
[root@master module]# mv jdk1.8.0_161/ jdk
2、配置环境变量 /etc/profile

# JAVA_HOME
export JAVA_HOME=/opt/module/jdk
export PATH=$PATH:$JAVA_HOME/bin
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
激活环境变量 ...
1、下载并安装MySQL官方的 Yum Repository

[root@localhost ~]# wget -i -c http://dev.mysql.com/get/mysql57-community-release-el7-10.noarch.rpm
使用上面的命令就直接下载了安装用的Yum Repository,就可以直接安装了
[root@localhost ~]# yum -y install mysql57-community-release-el7-10.noarch.rpm
安装mysql服务器
[root@localhost ~]# yum -y install mysql-community-server
2、mysql设置

# 启动
[root@localhost ~]# systemctl start mysqld.service
# 在日志中查找初始密码
[root@localhost ~]# grep "password" /var/log/mysqld.log
123456789101112131415 ...
数据分析numpy创建数组
np.array([1,2,3])
np.zeros(10) #数组的值都是0
np.ones((3,5)) #数组的值都是1
np.full((3,5),3.14) #用某个数填满数组
np.arange(0,20,2) #从0到20(不包含),步长为2
np.linspace(0,1,5) #5个数均匀分配到0~1
np.random.random((3,3)) #创建3*3的、由0~1均匀分布的随机数组成的数组(不传形状参数时返回单个标量)
np.random.normal(0,1,(3,3)) #均值为0、方差为1的正态分布随机数数组
np.random.randint(0,10,(3,3)) #[0,10)之间随机整数数组
np.eye(3) #创建一个3*3的单位矩阵
np.empty(3) #创建一个由3个元素组成的未初始化的数组(默认dtype为float64),数组的值是内存空间中的任意值
数组属性
arr.ndim #数组的维度
arr.shape #数组每个维度的大小
arr.size #数组的总大小
arr.dtype #数组的数据类型
arr.itemsize #每个元素字节大小
arr.nbytes #数组总字节大小
数组切片 ...
MyBatisTest
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117package test;import cn.xiaojia521.bean.Employee;import cn.xiaojia521.dao.EmployeeMapper;import org.apache.ibatis.io.Resources;import org.apache.ibatis.session.SqlSession;import org.apache.ibatis.session.SqlSessionFactory;import org.apache.ibatis ...
依赖
123456789101112131415<dependency> <groupId>org.apache.flink</groupId> <artifactId>flink-java</artifactId> <version>1.10.1</version> </dependency> <dependency> <groupId>org.apache.flink</groupId> <artifactId>flink-streaming-java_2.12</artifactId> <version>1.10.1</version> </dependency> <dependency> ...
依赖
12345<dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-streaming-kafka-0-10_2.11</artifactId> <version>${spark.version}</version> </dependency>
1234567891011121314151617181920212223242526272829303132333435363738package streamingimport org.apache.kafka.clients.consumer.ConsumerConfigimport org.apache.kafka.common.serialization.StringDeserializerimport org.apache.spark.streaming. ...
程序中代码:
123456789101112131415161718DataSet<Tuple2<String, Integer>> wordCountDataSet = inputDataSet.flatMap(new MyFlatMapper()) .groupBy(0) .sum(1); // 打印输出 wordCountDataSet.print(); } public static class MyFlatMapper implements FlatMapFunction<String, Tuple2<String,Integer>> { public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throwsException { String[] words = value.split(" "); for (String word : words) { o ...
123456789101112131415161718192021222324252621/04/21 14:40:00 ERROR Executor: Exception in task 0.0 in stage 16.0 (TID 22)java.lang.NoSuchMethodError: net.jpountz.lz4.LZ4BlockInputStream.<init>(Ljava/io/InputStream;Z)V at org.apache.spark.io.LZ4CompressionCodec.compressedInputStream(CompressionCodec.scala:122) at org.apache.spark.serializer.SerializerManager.wrapForCompression(SerializerManager.scala:163) at org.apache.spark.serializer.SerializerManager.wrapStream(SerializerManager.scala:12 ...