Polynomial multiplication problem SOS

Hi guys,

My multiplication algorithm is not working correctly; it keeps giving me some nonsense term. Any help would be appreciated.

Thanks,

Mike


class Pol{
	
public:
	Pol(){size=0;pointer=new int[MAX];}
	Pol(const Pol& t){
		size=t.size;
		pointer=new int[size];
		for(int i=0;i<size;i++)
			*(pointer+i)=*(t.pointer+i);
	}
	~Pol(){
		delete[]pointer;
	}
	int get_longer(const int a, const int b){
		if(a>=b)return a;
		else return b;	
	}
	int get_shorter(const int a, const int b){
		if(a<=b)return a;
		else return b;
	}
	Pol& operator=(Pol& t){
		size=t.size;
		pointer=new int[size];
		for(int i=0;i<size;i++)
			*(pointer)=*(t.pointer);
		return *this;
	}
	Pol operator +(Pol& t){
		Pol sum;
		int shortest=get_shorter(t.size,size);
		int longest=get_longer(t.size,size);
		int difference=longest-shortest;
		int i;

		if(t.size<size){
			for(i=shortest;i<=difference+shortest;i++)
				t.pointer[i]=0;
		}
		else{
			for(i=shortest;i<=difference+shortest;i++)
				pointer[i]=0;
		}
		sum.pointer=new int[longest];
		sum.size=longest;
		for(i=0;i<sum.size;i++)
			*(sum.pointer+i)=*(pointer+i)+*(t.pointer+i);
		return sum;
	}
		Pol operator -(Pol& t){
		Pol sub;
		int shortest=get_shorter(t.size,size);
		int longest=get_longer(t.size,size);
		int difference=longest-shortest;
		int i;

		if(t.size<size){
			for(i=shortest;i<=difference+shortest;i++)
				t.pointer[i]=0;
		}
		else{
			for(i=shortest;i<=difference+shortest;i++)
				pointer[i]=0;
		}
		sub.pointer=new int[longest];
		sub.size=longest;
		for(i=0;i<sub.size;i++)
			*(sub.pointer+i)=*(pointer+i)-*(t.pointer+i);
		return sub;
	}
	Pol operator *(Pol& t){//problem here//gives extra nonsense term
		Pol solution;
		solution.size=t.size+size;
		solution.pointer=new int[solution.size-1];
		for(int i=0;i<t.size;i++)
			for(int j=0;j<size;j++)
				solution.pointer[i+j]=+(pointer[j]*t.pointer[i]);
		return solution;	
	}
	friend istream& operator>>(istream& in, Pol& t);
	friend ostream& operator<<(ostream& out, Pol& t);

private:
	int *pointer;
	int size;
};

int main(){

	Pol test, test1;
	cout<<"Enter first polynomial\n";
	cin>>test;
	cout<<"\nEnter second polynomial\n";
	cin>>test1;

	cout<<"\n\n"<<static_cast<char>(ALPHA)<<"(X)= "<<test<<endl;
	cout<<static_cast<char>(BETA)<<"(X)= "<<test1;

	cout<<"\n\nAddition\n";
	Pol addition=test+test1;
	cout<<static_cast<char>(THETA)<<"(X)= "<<addition;

	cout<<"\n\Subtraction\n";
	Pol subtraction=test-test1;
	cout<<static_cast<char>(OMEGA)<<"(X)= "<<subtraction;

	cout<<"\n\Multiplication\n";
	Pol multiplcation=test*test1;
	cout<<static_cast<char>(GAMMA)<<"(X)= "<<multiplcation;

	_getch();
	return 0;
}	

ostream& operator<<(ostream& out, Pol& t){
	for(int i=0;i<t.size;i++){
		if(*(t.pointer+i)>=0 && i!=0)
			out<<"+("<<*(t.pointer+i)<<"X"<<"^"<<i<<")";
		else
			out<<"-("<<abs(*(t.pointer+i))<<"X"<<"^"<<i<<")";
	}
	return out;
}
istream& operator>>(istream& in, Pol& t){
	char input[MAX];
	bool correct;
	int number;
	do{
		cout<<"Enter "<<t.size<<" co-efficient: ";
		in>>input;
		for(int i=0;i<strlen(input);i++){
			if(isdigit(input[i])){
				number=atoi(input);
				t.pointer[t.size]=number;
				t.size++;
				break;
			}
			else
				correct=false;	
		}
	}while(correct);
	return in;
}
I doubt anyone's going to debug your broken code for you. At least tell us where you suspect the problem is, or what you expected as output and what actually came out.
Everything works fine; the only issue is with the multiplication function.

Examples go a long, long way.

Can you please give us some example input and what output it's actually producing?

I tried to run the code and check it myself, but it doesn't compile as-is (MAX, BETA, GAMMA, etc. are undefined).
Apologies, I forgot to cut and paste the constants.

const int MAX=1000;
const int ALPHA=224;
const int BETA=225;
const int THETA=233;
const int OMEGA=234;
const int GAMMA=226;


For example, if I put in for the first polynomial
4
2
3
10

and for the second polynomial
3
5

I get

-12x^0+20X^1+10X^2+15X^3+50X^4-33686019X^5
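
For reference, here is a minimal sketch of a corrected operator*, under two assumptions: coefficients are stored lowest degree first (which the X^0, X^1, ... output suggests), and the MAX-sized buffer from the default constructor is large enough to hold the product. The product of polynomials with n and m coefficients has n+m-1 coefficients, every result coefficient has to start at zero, and =+ (which is just plain assignment of a unary plus) needs to be += so the cross terms accumulate. With the inputs above the expected product would be 12 + 26X^1 + 19X^2 + 45X^3 + 50X^4; the -33686019X^5 term is whatever happens to sit one slot past the five-element array that new int[solution.size-1] allocates.

Pol operator *(Pol& t){
	Pol solution;	//default constructor already allocates a MAX-sized buffer, so no extra new[] (and no leak)
	solution.size=size+t.size-1;	//4 and 2 coefficients give 5 result terms, not 6
	for(int i=0;i<solution.size;i++)
		solution.pointer[i]=0;	//every coefficient starts at zero
	for(int i=0;i<t.size;i++)
		for(int j=0;j<size;j++)
			solution.pointer[i+j]+=pointer[j]*t.pointer[i];	//accumulate, don't overwrite
	return solution;
}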


