import warnings
warnings.filterwarnings('ignore')
import tensorflow as tf
# Variables (tf.Variable) and constants (tf.constant)
w = tf.Variable([[0.5, 1.0]])    # feature weights (1x2)
x = tf.Variable([[2.0], [1.0]])  # feature values (2x1)
y = tf.matmul(a=w, b=x)          # matrix multiplication
# All-zeros matrix; it is best to set the dtype explicitly (float32 here)
a1 = tf.zeros(shape=(4, 3), dtype=tf.float32)
a2 = tf.zeros_like(tensor=a1)
# All-ones matrix; an int dtype can be used as well
a3 = tf.ones(shape=(4, 3), dtype=tf.int32)
a4 = tf.ones_like(a3)
# Constant 1-D tensor populated with a value list (a vector)
t1 = tf.constant([1, 2, 3, 4, 5])
# Constant 2-D tensor filled with the scalar value -1
t2 = tf.constant(value=-1, shape=[3, 4])
# print(t2)
# Generate an evenly spaced sequence: start=10, stop=14, num=4 elements, with an op name
t3 = tf.lin_space(10.0, 14.0, 4, name='linspace')
# print(t3)
t4 = tf.range(start=3, limit=18, delta=3)
# print(t4)
# Random values drawn from a normal distribution: shape, mean, standard deviation
t5 = tf.random_normal(shape=[3, 3], mean=1.0, stddev=4.0)
# Shuffle a sequence
c = tf.constant([[1, 2], [3, 4], [5, 6], [7, 8]])
t6 = tf.random_shuffle(c)  # shuffles c along its first dimension (the rows)
init_op = tf.global_variables_initializer()  # variables must be explicitly initialized before running ops that use them
# Open a session
with tf.Session() as sess:
    sess.run(init_op)
    print(y.eval())
    print(a1.eval())
    print(a2.eval())
    print(a3.eval())
    print(a4.eval())
    print(t1.eval())
    print(t2.eval())
    print(t3.eval())
    print(t4.eval())
    print(t5.eval())
    print(t6.eval())  # Tensor.eval() evaluates the tensor in the default session and returns its value as a NumPy array
state = tf.Variable(0)  # create a variable
new_value = tf.add(state, tf.constant(1))  # state + 1; tf.add() adds two values
update = tf.assign(state, new_value)  # assign the new value to state
# tf.assign() writes new_value into state
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(state))
    for i in range(3):
        print(sess.run(update))
        print(sess.run(state))
# Save variables with tf.train.Saver()
w = tf.Variable([[0.5, 1.0]])
x = tf.Variable([[2.0], [1.0]])
y = tf.matmul(w, x)
init_op = tf.global_variables_initializer()
saver = tf.train.Saver()  # saver for the model's variables
with tf.Session() as sess:
    sess.run(init_op)
    # Do some work with the model, then save the variables to disk
    save_path = saver.save(sess, r"C:\Users\lenovo\Desktop\TensorFlow")
    print('Model saved in file:', save_path)
# Convert a NumPy array to a Tensor
import numpy as np
a = np.zeros((3, 3))
ta = tf.convert_to_tensor(a)
with tf.Session() as sess:
    print(sess.run(ta))
# Placeholders are fed with concrete values at run time via feed_dict
input1 = tf.placeholder(tf.float32)
input2 = tf.placeholder(tf.float32)
output = tf.matmul(input1, input2)
with tf.Session() as sess:
    print(sess.run(output, feed_dict={input1: [[7.0, 6.0]], input2: [[2.0], [3.0]]}))
"""
1. tf.ones(shape, dtype=tf.float32, name=None)
tf.ones([2, 3], tf.int32) ==> [[1, 1, 1], [1, 1, 1]]
2. tf.zeros(shape, dtype=tf.float32, name=None)
tf.zeros([2, 3], tf.int32) ==> [[0, 0, 0], [0, 0, 0]]
3. tf.ones_like(tensor, dtype=None, name=None)
Creates a tensor with the same type and shape as the given tensor, with all elements set to 1.
# 'tensor' is [[1, 2, 3], [4, 5, 6]]
tf.ones_like(tensor) ==> [[1, 1, 1], [1, 1, 1]]
4. tf.zeros_like(tensor, dtype=None, name=None)
Creates a tensor with the same type and shape as the given tensor, with all elements set to 0.
# 'tensor' is [[1, 2, 3], [4, 5, 6]]
tf.zeros_like(tensor) ==> [[0, 0, 0], [0, 0, 0]]
5. tf.fill(dims, value, name=None)
Creates a tensor of shape dims filled with value.
# Output tensor has shape [2, 3].
fill([2, 3], 9) ==> [[9, 9, 9]
                     [9, 9, 9]]
6. tf.constant(value, dtype=None, shape=None, name='Const')
Creates a constant tensor from value; a shape can optionally be specified.
# Constant 1-D Tensor populated with value list.
tensor = tf.constant([1, 2, 3, 4, 5, 6, 7]) => [1 2 3 4 5 6 7]
# Constant 2-D tensor populated with scalar value -1.
tensor = tf.constant(-1.0, shape=[2, 3]) => [[-1. -1. -1.] [-1. -1. -1.]]
7. tf.linspace(start, stop, num, name=None)
Returns a tensor of num evenly spaced values from start to stop (both endpoints included); for num > 1 the step is (stop - start) / (num - 1), so the last element equals stop.
start and stop must be tf.float32 or tf.float64; num is an int.
tf.linspace(10.0, 12.0, 3, name="linspace") => [ 10.0  11.0  12.0]
8. tf.range(start, limit=None, delta=1, name='range')
Returns a tensor containing an arithmetic sequence from start up to, but not including, limit, with step delta.
start, limit and delta are all int32.
# 'start' is 3
# 'limit' is 18
# 'delta' is 3
tf.range(start, limit, delta) ==> [3, 6, 9, 12, 15]
# 'start' is 0, 'limit' is 5
tf.range(start, limit) ==> [0, 1, 2, 3, 4]
9. tf.random_normal(shape, mean=0.0, stddev=1.0, dtype=tf.float32, seed=None, name=None)
Returns a tensor whose elements are drawn from a normal distribution.
seed: a Python integer used to create a random seed for the distribution; see tf.set_random_seed for behavior.
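A minimal illustrative call (shape and parameters chosen only for this sketch; the output is random and differs on every run):
tf.random_normal([2, 3], mean=0.0, stddev=1.0)  # 2x3 tensor of samples from N(0, 1)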
10. tf.truncated_normal(shape, mean=0.0, stddev=1.0, dtype=tf.float32, seed=None, name=None)
Returns a tensor whose elements are drawn from a truncated normal distribution: samples falling more than two standard deviations from the mean are dropped and re-drawn.
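A minimal illustrative call (shape chosen only for this sketch; the output is random):
tf.truncated_normal([2, 2], mean=0.0, stddev=1.0)  # every sample lies within mean ± 2*stddev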
11. tf.random_uniform(shape, minval=0, maxval=None, dtype=tf.float32, seed=None, name=None)
Returns a tensor of the given shape whose elements are drawn uniformly from between minval and maxval.
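A minimal illustrative call (bounds chosen only for this sketch; the output is random):
tf.random_uniform([2, 3], minval=0, maxval=10)  # float values uniformly drawn from [0, 10)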
12. tf.random_shuffle(value, seed=None, name=None)
Randomly shuffles value (a tensor) along its first dimension.
[[1, 2],      [[2, 3],
 [2, 3],  ==>  [1, 2],
 [3, 4]]       [3, 4]]
13. tf.set_random_seed(seed)
Sets the graph-level seed for random number generation (see the seed examples at the end of these notes).
14. tf.assign(A, new_number)
Assigns the value new_number to the variable A.
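A minimal illustrative snippet (the names and values are made up for this sketch):
A = tf.Variable(10)
update = tf.assign(A, 20)  # after sess.run(update), A holds 20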
15. tf.argmax(input, dimension)
Returns the index of the largest value; dimension=0 searches down each column, dimension=1 searches across each row.
Often used together with tf.equal() to compute model accuracy.
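A small illustrative example (the input matrix is made up for this sketch):
# 'a' is [[1, 3, 2],
#         [4, 0, 5]]
tf.argmax(a, 0) ==> [1, 0, 1]  # index of the largest value in each column
tf.argmax(a, 1) ==> [1, 2]     # index of the largest value in each row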
16. tf.square(x)
Squares every element of x.
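For example (input made up for this sketch):
tf.square([-2, 3]) ==> [4, 9]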
17. tf.reduce_mean()
Computes the mean of a tensor's elements, optionally across the given dimensions.
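A small illustrative example (input made up for this sketch):
# 'x' is [[1., 2.],
#         [3., 4.]]
tf.reduce_mean(x)          ==> 2.5
tf.reduce_mean(x, axis=0)  ==> [2., 3.]
tf.reduce_mean(x, axis=1)  ==> [1.5, 3.5]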
To generate different sequences across sessions, set neither graph-level nor op-level seeds:
a = tf.random_uniform([1])
b = tf.random_normal([1])
print("Session 1")
with tf.Session() as sess1:
    print(sess1.run(a))  # generates 'A1'
    print(sess1.run(a))  # generates 'A2'
    print(sess1.run(b))  # generates 'B1'
    print(sess1.run(b))  # generates 'B2'
print("Session 2")
with tf.Session() as sess2:
    print(sess2.run(a))  # generates 'A3'
    print(sess2.run(a))  # generates 'A4'
    print(sess2.run(b))  # generates 'B3'
    print(sess2.run(b))  # generates 'B4'
To generate the same repeatable sequence for an op across sessions, set the seed for the op:
a = tf.random_uniform([1], seed=1)
b = tf.random_normal([1])
# Repeatedly running this block with the same graph will generate the same
# sequence of values for 'a', but different sequences of values for 'b'.
print("Session 1")
with tf.Session() as sess1:
    print(sess1.run(a))  # generates 'A1'
    print(sess1.run(a))  # generates 'A2'
    print(sess1.run(b))  # generates 'B1'
    print(sess1.run(b))  # generates 'B2'
print("Session 2")
with tf.Session() as sess2:
    print(sess2.run(a))  # generates 'A1'
    print(sess2.run(a))  # generates 'A2'
    print(sess2.run(b))  # generates 'B3'
    print(sess2.run(b))  # generates 'B4'
To make the random sequences generated by all ops be repeatable across sessions, set a graph-level seed:
tf.set_random_seed(1234)
a = tf.random_uniform([1])
b = tf.random_normal([1])
# Repeatedly running this block with the same graph will generate the same
# sequences of 'a' and 'b'.
print("Session 1")
with tf.Session() as sess1:
    print(sess1.run(a))  # generates 'A1'
    print(sess1.run(a))  # generates 'A2'
    print(sess1.run(b))  # generates 'B1'
    print(sess1.run(b))  # generates 'B2'
print("Session 2")
with tf.Session() as sess2:
    print(sess2.run(a))  # generates 'A1'
    print(sess2.run(a))  # generates 'A2'
    print(sess2.run(b))  # generates 'B1'
    print(sess2.run(b))  # generates 'B2'
"""